Social Utility Modeling



Tutorial 4 - Li et al., 2022

1.4 Model the Data Generation Process

First let's define the functions
function d_Inequality = inequality(a1, b1, a2, b2)
% Difference in payoff inequality between the two choices.
% Positive when choice 2 splits the pie more unevenly than choice 1.
gapChoice1 = abs(a1 - b1);
gapChoice2 = abs(a2 - b2);
d_Inequality = gapChoice2 - gapChoice1;
end
 
function d_lossAdvantaged = harm(a0, b0, a1, b1, a2, b2)
% Difference in how much each choice costs the initially advantaged
% player: (loss under choice 1) - (loss under choice 2).
% NOTE(review): when the initial split is even (a0 == b0), player A is
% arbitrarily treated as the advantaged one — confirm this is intended.
if a0 >= b0
    lossChoice1 = a0 - a1;
    lossChoice2 = a0 - a2;
else
    lossChoice1 = b0 - b1;
    lossChoice2 = b0 - b2;
end
d_lossAdvantaged = lossChoice1 - lossChoice2;
end
 
function rankReverseDiff = rankReverse(a0, b0, a1, b1, a2, b2)
% Compare whether each choice flips the initial payoff ranking.
% Returns +1 if only choice 1 reverses the ranking, -1 if only choice 2
% does, and 0 if both, neither, or the initial split is even. A choice
% that merely equalizes payoffs (difference of zero) does not count as
% a reversal.
initialSign = sign(a0 - b0);
if initialSign == 0
    rankReverseDiff = 0;
    return;
end
reverses = @(a, b) sign(a - b) == -initialSign;
rankReverseDiff = double(reverses(a1, b1)) - double(reverses(a2, b2));
end
Now let's check and see if they do what we want. Let's make an example trial:
example = table(20, 0, 8, 12, 15, 5, 'VariableNames', ["a0", "b0", "a1", "b1", "a2", "b2"])
example = 1×6 table
 a0b0a1b1a2b2
1200812155
Now get the outputs based on this example
inequality(example.a1(1), example.b1(1), example.a2(1), example.b2(1))
ans = 6
harm(example.a0(1), example.b0(1), example.a1(1), example.b1(1), example.a2(1), example.b2(1))
ans = 7
rankReverse(example.a0(1), example.b0(1), example.a1(1), example.b1(1), example.a2(1), example.b2(1))
ans = 1

1.5 Simulating Data

Now let's preallocate and define functions, triallist, and parameters
% Build 100 random trials: the initial pie is 20 points split between
% players A and B, with A always getting the 10-20 (advantaged) share.
a0 = randi([10,20], 100, 1);
b0 = 20 - a0;
trialList = table(a0, b0);
clear a0 b0
 
% Choice 1: A keeps between 5 points and their initial amount; B gets the rest.
for i = 1:length(trialList.a0)
trialList.a1(i) = randi([5, trialList.a0(i)]);
end
trialList.b1 = 20 - trialList.a1;
 
% Choice 2: same draw, but forced to an even (10/10) split whenever it
% would duplicate choice 1, so the two options always differ.
for i = 1:length(trialList.a0)
trialList.a2(i) = randi([5, trialList.a0(i)]);
if trialList.a2(i) == trialList.a1(i)
trialList.a2(i) = 10;
end
end
trialList.b2 = 20 - trialList.a2;
% Append the A/B-swapped mirror of every trial, giving 200 counterbalanced rows.
trialList = [trialList; trialList(:, [2, 1, 4, 3, 6, 5])]
trialList = 200×6 table
 a0b0a1b1a2b2
1182713146
21911641010
31197131010
42001371010
5164614911
6119911119
7137713128
8164128119
9200164146
10200164146
11119812614
12200614911
13200812128
14155155713
 
function util = utility(pars, IVs)
% Social-utility difference between choice 1 and choice 2.
% pars: parameter vector whose first three entries are alpha (inequality
% weight), delta (harm weight), and rho (rank-reversal weight).
% IVs: numeric row whose first six entries are [a0 b0 a1 b1 a2 b2].
IVs = double(IVs);
alpha = pars(1);
delta = pars(2);
rho = pars(3);
inequalityTerm = inequality(IVs(3), IVs(4), IVs(5), IVs(6));
harmTerm = harm(IVs(1), IVs(2), IVs(3), IVs(4), IVs(5), IVs(6));
reversalTerm = rankReverse(IVs(1), IVs(2), IVs(3), IVs(4), IVs(5), IVs(6));
util = alpha * inequalityTerm - delta * harmTerm - rho * reversalTerm;
end
 
function prob = probability(pars, utilitydiff)
% Choice probability: logistic in the utility difference, mixed with a
% lapse process. The last three entries of pars are beta (inverse
% temperature), epsilon (lapse rate), and gamma (lapse bias).
beta = pars(end-2);
epsilon = pars(end-1);
gamma = pars(end);
logistic = 1 / (1 + exp(-beta * utilitydiff));
prob = logistic * (1 - 2 * epsilon) + epsilon + gamma * (2 * epsilon);
% Clip away from exactly 0 and 1 so downstream log-likelihoods stay finite.
prob = min(prob, 0.9999999999);
prob = max(prob, 0.00000000001);
end
 
% Grid of generating parameters: 4 jittered levels each for alpha/delta/rho,
% 2 for beta, 6 exact levels for epsilon, and 2 jittered levels for gamma,
% giving a 4x4x4x2x6x2 struct array of simulated "participants".
% NOTE(review): the variable name `eps` shadows MATLAB's builtin eps here.
freeParameters = struct();
mainpars = (0:3)./2;
bet = (0:5:5);
eps = (0:0.1:0.5);
gam = (-0.5:0.5:0);
for i = 1:length(mainpars)
for j = 1:length(mainpars)
for k = 1:length(mainpars)
for l = 1:length(bet)
for m = 1:length(eps)
for n = 1:length(gam)
freeParameters(i, j, k, l, m, n).alpha = mainpars(i) + rand(1,1)*0.5;
freeParameters(i, j, k, l, m, n).delta = mainpars(j) + rand(1,1)*0.5;
freeParameters(i, j, k, l, m, n).rho = mainpars(k) + rand(1,1)*0.5;
% NOTE(review): bet(l).*rand*5 is always 0 when bet(l)==0 and ranges up to
% 25 when bet(l)==5 — beyond the recovery upper bound of 10. Confirm whether
% bet(l) + rand*5 was intended.
freeParameters(i, j, k, l, m, n).beta = bet(l).*rand(1,1)*5;
freeParameters(i, j, k, l, m, n).epsilon = eps(m);
freeParameters(i, j, k, l, m, n).gamma = gam(n) + rand(1, 1)*0.5;
end
end
end
end
end
end
 
function pred = generatePredictions(parameters, df)
% Per-trial probability of picking choice 1 under the utility model.
% parameters: [alpha delta rho beta epsilon gamma].
% df: table whose FIRST SIX columns are a0 b0 a1 b1 a2 b2 (any further
% columns are carried through table2array but ignored, since utility
% only reads the first six entries).
pred = zeros(size(df, 1), 1);
for i = 1:size(df, 1)
thisTrialIVs = table2array(df(i, :));
utilityDiff = utility(parameters, thisTrialIVs);
pred(i) = probability(parameters, utilityDiff);
end
end
Now that all of that's done, let's generate predictions
% For every cell of the parameter grid, assemble the 6-vector of generating
% parameters and store that cell's per-trial predicted probabilities.
for i = 1:length(mainpars)
for j = 1:length(mainpars)
for k = 1:length(mainpars)
for l = 1:length(bet)
for m = 1:length(eps)
for n = 1:length(gam)
pars = [freeParameters(i, j, k, l, m, n).alpha, freeParameters(i, j, k, l, m, n).delta, freeParameters(i, j, k, l, m, n).rho, freeParameters(i, j, k, l, m, n).beta, freeParameters(i, j, k, l, m, n).epsilon, freeParameters(i, j, k, l, m, n).gamma];
freeParameters(i, j, k, l, m, n).predictions = generatePredictions(pars, trialList);
end
end
end
end
end
end

1.6 Compare Recovered Parameters

Let's write the objective function
function obj_val = obj_function(params, df, optimMethod)
% Objective for fitting the full social-utility model.
% params: [alpha delta rho beta epsilon gamma].
% df: trial table whose first six columns are a0..b2 and whose 7th
% column is the observed 0/1 choice.
% optimMethod: 'OLS' for sum of squared errors; anything else gives the
% negative log-likelihood (default 'MLE').
if nargin < 3
optimMethod = 'MLE';
end
Prob1 = generatePredictions(params, df);
Chose1 = table2array(df(:, 7));
if strcmp(optimMethod, 'OLS')
obj_val = sum((Chose1 - Prob1) .^ 2);
else % MLE
% Bug fix: the non-chosen term previously used log(Prob1), collapsing the
% objective to -sum(log(Prob1)). It must be log(1 - Prob1), matching the
% Bernoulli log-likelihood used by all of the of_* model objectives below.
obj_val = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
Now we can set up the optimizer (we'll suppress the output of successful completion to avoid inundating the output)
% Shared optimizer settings: starting point and box constraints for
% [alpha, delta, rho, beta, epsilon, gamma].
initial_params = [1, 1, 1, 4, 0.25, 0];
lower_bounds = [0, 0, 0, 0, 0, -0.5];
upper_bounds = [2, 2, 2, 10, 0.5, 0.5];
 
function result = optimize(obj, initial_params, lower_bounds, upper_bounds, df)
% Box-constrained fit of an objective handle via fmincon (Optimization
% Toolbox), with display suppressed. Runs the OLS variant of the objective
% first and falls back to the MLE variant only if the OLS run throws.
% NOTE(review): fmincon rarely errors on a well-posed problem, so the MLE
% branch is effectively dead code — confirm OLS-first is the intended fit.
 
try
result = fmincon(@(x) obj(x, df, 'OLS'), initial_params, [], [], [], [], lower_bounds, upper_bounds, [], optimoptions('fmincon', 'Display', 'off'));
catch
result = fmincon(@(x) obj(x, df, 'MLE'), initial_params, [], [], [], [], lower_bounds, upper_bounds, [], optimoptions('fmincon', 'Display', 'off'));
end
end
And this lets us recover the free parameters
% For every grid cell: simulate 0/1 choices from that cell's predicted
% probabilities, refit the model, and store the recovered parameters.
for i = 1:length(mainpars)
for j = 1:length(mainpars)
for k = 1:length(mainpars)
for l = 1:length(bet)
for m = 1:length(eps)
for n = 1:length(gam)
% Bernoulli sample per trial: probability scaled to 1-1000 and compared
% against a uniform integer draw.
trialList.Chose1(:) = round(freeParameters(i, j, k, l, m, n).predictions .* 1000) > randi([1, 1000], length(freeParameters(i, j, k, l, m, n).predictions), 1);
result = optimize(@obj_function, initial_params, lower_bounds, upper_bounds, trialList);
freeParameters(i, j, k, l, m, n).alphaRecovered = result(1);
freeParameters(i, j, k, l, m, n).deltaRecovered = result(2);
freeParameters(i, j, k, l, m, n).rhoRecovered = result(3);
freeParameters(i, j, k, l, m, n).betaRecovered = result(4);
freeParameters(i, j, k, l, m, n).epsilonRecovered = result(5);
freeParameters(i, j, k, l, m, n).gammaRecovered = result(6);
end
end
end
end
end
end
Let's extract values to format in a plottable way
% Flatten the 6-D freeParameters grid into parallel column vectors so
% actual vs. recovered values can be scattered against each other.
totalIterations = length(mainpars)^3 * length(bet) * length(eps) * length(gam);
parsPlot.alpha = NaN(totalIterations, 1);
parsPlot.alphaRecovered = NaN(totalIterations, 1);
parsPlot.delta = NaN(totalIterations, 1);
parsPlot.deltaRecovered = NaN(totalIterations, 1);
parsPlot.rho = NaN(totalIterations, 1);
parsPlot.rhoRecovered = NaN(totalIterations, 1);
parsPlot.beta = NaN(totalIterations, 1);
parsPlot.betaRecovered = NaN(totalIterations, 1);
parsPlot.epsilon = NaN(totalIterations, 1);
parsPlot.epsilonRecovered = NaN(totalIterations, 1);
parsPlot.gamma = NaN(totalIterations, 1);
parsPlot.gammaRecovered = NaN(totalIterations, 1);
% Bug fix: the original used parsPlot.alpha(end+1) = ..., which APPENDS
% past the preallocated block, leaving totalIterations leading NaNs and
% making every vector twice the intended length. Overwrite the
% preallocated slots by an explicit running index instead.
row = 0;
for i = 1:length(mainpars)
for j = 1:length(mainpars)
for k = 1:length(mainpars)
for l = 1:length(bet)
for m = 1:length(eps)
for n = 1:length(gam)
row = row + 1;
parsPlot.alpha(row) = freeParameters(i, j, k, l, m, n).alpha;
parsPlot.alphaRecovered(row) = freeParameters(i, j, k, l, m, n).alphaRecovered;
parsPlot.delta(row) = freeParameters(i, j, k, l, m, n).delta;
parsPlot.deltaRecovered(row) = freeParameters(i, j, k, l, m, n).deltaRecovered;
parsPlot.rho(row) = freeParameters(i, j, k, l, m, n).rho;
parsPlot.rhoRecovered(row) = freeParameters(i, j, k, l, m, n).rhoRecovered;
parsPlot.beta(row) = freeParameters(i, j, k, l, m, n).beta;
parsPlot.betaRecovered(row) = freeParameters(i, j, k, l, m, n).betaRecovered;
parsPlot.epsilon(row) = freeParameters(i, j, k, l, m, n).epsilon;
parsPlot.epsilonRecovered(row) = freeParameters(i, j, k, l, m, n).epsilonRecovered;
parsPlot.gamma(row) = freeParameters(i, j, k, l, m, n).gamma;
parsPlot.gammaRecovered(row) = freeParameters(i, j, k, l, m, n).gammaRecovered;
end
end
end
end
end
end
So we can now verify that we can reliably recover them using plots
% Actual vs. recovered scatter for each parameter, colored by the (exact)
% epsilon level, with a least-squares line and an identity reference line.
% Epsilon itself is uncolored; the gamma plot omits lsline as written.
parsPlot.Epsilon = categorical(parsPlot.epsilon);
 
figure;
scatter(parsPlot.alpha, parsPlot.alphaRecovered, [], parsPlot.Epsilon);
hold on; lsline; plot(xlim, ylim, '--k'); hold off;
title('Alpha'); xlabel('Actual'); ylabel('Recovered');
 
figure;
scatter(parsPlot.delta, parsPlot.deltaRecovered, [], parsPlot.Epsilon);
hold on; lsline; plot(xlim, ylim, '--k'); hold off;
title('Delta'); xlabel('Actual'); ylabel('Recovered');
 
figure;
scatter(parsPlot.rho, parsPlot.rhoRecovered, [], parsPlot.Epsilon);
hold on; lsline; plot(xlim, ylim, '--k'); hold off;
title('Rho'); xlabel('Actual'); ylabel('Recovered');
 
figure;
scatter(parsPlot.beta, parsPlot.betaRecovered, [], parsPlot.Epsilon);
hold on; lsline; plot(xlim, ylim, '--k'); hold off;
title('Beta'); xlabel('Actual'); ylabel('Recovered');
 
figure;
scatter(parsPlot.epsilon, parsPlot.epsilonRecovered);
hold on; lsline; plot(xlim, ylim, '--k'); hold off;
title('Epsilon'); xlabel('Actual'); ylabel('Recovered');
 
figure;
scatter(parsPlot.gamma, parsPlot.gammaRecovered, [], parsPlot.Epsilon);
hold on; plot(xlim, ylim, '--k'); hold off;
title('Gamma'); xlabel('Actual'); ylabel('Recovered');
These are a bit more difficult to visually interpret, but nothing looks particularly accurate. Let's take a closer look at situations where we should see good accuracy.
% The same actual-vs-recovered plots restricted to low-noise cells
% (epsilon == 0, high beta) where recovery should be most accurate.
% NOTE(review): the title('Minimal Noise') on the bare figure appears to be
% overwritten by the first subplot's own title — sgtitle would label the
% whole figure; confirm intent.
figure;
title('Minimal Noise')
subplot(3, 2, 1);
scatter(parsPlot.alpha(parsPlot.epsilon == 0 & parsPlot.beta > 5), parsPlot.alphaRecovered(parsPlot.epsilon == 0 & parsPlot.beta > 5));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Alpha'); lsline; xlabel('Actual'); ylabel('Recovered');
 
subplot(3, 2, 2);
scatter(parsPlot.delta(parsPlot.epsilon == 0 & parsPlot.beta > 5), parsPlot.deltaRecovered(parsPlot.epsilon == 0 & parsPlot.beta > 5));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Delta'); lsline; xlabel('Actual'); ylabel('Recovered');
 
subplot(3, 2, 3);
scatter(parsPlot.rho(parsPlot.epsilon == 0 & parsPlot.beta > 5), parsPlot.rhoRecovered(parsPlot.epsilon == 0 & parsPlot.beta > 5));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Rho'); lsline; xlabel('Actual'); ylabel('Recovered');
 
subplot(3, 2, 4);
scatter(parsPlot.beta(parsPlot.epsilon == 0 & parsPlot.beta <=10), parsPlot.betaRecovered(parsPlot.epsilon == 0 & parsPlot.beta <=10));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Beta'); lsline; xlabel('Actual'); ylabel('Recovered');
 
subplot(3, 2, 5);
scatter(parsPlot.epsilon(parsPlot.beta > 5), parsPlot.epsilonRecovered(parsPlot.beta > 5));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Epsilon'); lsline; xlabel('Actual'); ylabel('Recovered');
 
subplot(3, 2, 6);
scatter(parsPlot.gamma(parsPlot.epsilon == 0.5 & parsPlot.beta > 5), parsPlot.gammaRecovered(parsPlot.epsilon == 0.5 & parsPlot.beta > 5));
hold on; plot(xlim, ylim, '--k'); hold off;
title('Gamma'); lsline; xlabel('Actual'); ylabel('Recovered');
There does seem to be a positive correlation between these parameters, but it's not particularly reliable. Beta, Epsilon, and Gamma are the strongest; Alpha and Delta are pretty poor, and Rho seems to be pretty random.

2.1 Recovering Free Parameters

First let’s get the trial data from participants
trialData = readtable("C:/Users/DELL/Downloads/Data/Data/HPP_fMRI_beh_data_for_lmm.csv")
trialData = 9104×51 table
 subjecttrial_numrunpartnerApartnerBinitial_Ainitial_BPie_initial_highPie_initial_lowdiff_initialtransfer_size_1fairness_1A_final_1B_final_1Pie_Alter1_ini_highPie_Alter1_ini_lowdiff_final_1reverse_1transfer_size_2fairness_2A_final_2B_final_2Pie_Alter2_ini_highPie_Alter2_ini_lowEqual_alter_pie_ini_highEqual_alter_pie_ini_lowUnqual_alter_pie_ini_highUnequal_alter_pie_ini_lowdiff_final_2reverse_2transfer_difffairness_difftrail_typeorder_optionorder_orientationHPP_keyreactionIf_RevIf_Rev_1Revsize1Revsize2TransferSize1_aTransferSize2_aTransferSize1_bTransferSize2_bMahMalMbhMblTrans1_DiffTrans2_Diff
110111464315215213510107107301214314310714311048121100000501010714340
210121821165165112414714770868138138131475162311201105205.50002.50001478133.50002.5000
31013123165165112414714770361381381381475012111200000203014713810
410141424715315312510108108201214414410814410048121100000501010814440
510151626751515510761288124151010101010101081201-245122011405250812101002
61016160231151511436412124801221414212414212024122100000301012414220
71017144471611611511851251271121521525121521301063211011707.50003.5000105121526.50003.5000
81018124292161621436513135805107111171171354024112200000305013511720
9101912045163163131215415411011451451451415491102311111109106.50004.50001545145.50004.5000
101011011653152152136129898102413413498134904812110000060209813440
11101111204551515510368121284051010101010101012801244122011003050128101020
1210112132211161611551061111650714899898116102411211000050701169820
13101131222351616511610111010111124714147101114770463221011105.50000.50002010111473.50000.5000
1410114170692161621424414144100486121261261446024112200000204014412620
15101151182315215213712898911241341348913490583211011106.50000.500020891344.50000.5000
1610116156293161631348712127501241515412715411036122100000401012715430
171011713439516165112471414770781299129121473154312201100205.50001.50001479123.50001.5000
18101181389115151141161244128112214142412142120104322101180741041214264
19101191622121515213810107710312441313471013490663221011306.50001.5000207101344.50001.5000
2010120182131515312612999901961266129961261-3652210110660639961203
21101211869214142127109779216128888887901-125122011206160798801
221012213031316163132451414590487121271271455024112200000204014512720
2310123140515315312710810810211214414481014410068321101120611081014451
2410124152353151531271010881021245131358101358056322101120612081013541
2510125166652151521351071010730244131341071349036122100000502010713430
26101261503715115114910610610411214214261014212088321101140721061014262
2710127150371631631312154154110241451451451549012111200000102015414510
281012813893151531288117711411241414471114410076322211140621071114452
2910129146473151531251081010820245131351081358036122100000502010813530
3010130124371621621461210810820714999999108011241120110060701089910
311013113025214142125107997206128888889701124121111005060978810
3210132172711531531248117117407108108108101172132311111102406111781021
33101331543315115114510106106401214214210614212048121210000501010614240
34101341404514214212612888801965115118851161-3652121110660638851103
3510135167351616511510101111101024714147111014770361221000005020111014730
36101361323714214212121331331001044124124121338192311111108106413341254
371013712241152152138107107103112143143710143110783211011306.50001.5000107101435.50001.5000
3810138154331421421212133133100241241241241338012111200000102013312410
39101391202131616313981277125112415154712154110863221011506.50002.5000107121545.50002.5000
4010140112611521521348116116508107107107101163142311111103406.50001.50001167102.50001.5000
411014112437142142126128888013611511588115603642121110060308811530
421014215863162162143613513580481261261261356012111200000304013512610
431014316731521521348116116505101071071071163012111200000405011610710
4410144112131611611548125125701215215212515213036121100000401012515230
451014513112161621436513135809101177117111354164312111100307213571142
46101461425321616214244141441001081266126121446184312201100207314461253
4710147130251531531248117117405101081081081172012111200000405011710810
481014815827315153126129999013661212699126603642210110060309912630
49101491525711616115510611116508149889891161134312201100507.50000.5000116892.50000.5000
501015014841316163132451414590981277127121455174312201100206.50002.50001457124.50002.5000
5110151146431621621436135135801086126126121356172311111106307313561243
521015211457162162148128108102171499999981001-1251120112071708109901
5310153142433151531248711117401241414411714410036122100000401011714430
541015412833516165112471414770489121291291473024112200000204014712920
551015523613132132115108787101212312387123904812110000050108712340
56101562425315315312121441441001045135135131448192311201108106414451354
57101572341731616313810118811312451414581114590663221011306.50001.5000208111454.50001.5000
581015823035153153123612612660481171171171264012111110000304012611710
591015926672151521312314143110365121251251437024112200000103014312520
601016024847216162149101177114171499999971101-2451220114072707119902
6110161222273151531212414144100961266126121446184312201100106314461253
621016227313161631336613136708101188118111363154312201100306.50001.50001368113.50001.5000
63101632165316316313712910910112414514591014590583211011106.50000.5000209101454.50000.5000
641016425049152152133612512570986116116111255162311201105306.50002.50001256113.50002.5000
651016522633115151148129779213641212479124805632210112071307912441
661016625661151151147148888011085115118851161-3652110110670738851103
671016721261516165111261515690368131381381565024112200000103015613820
681016824771511511481279792124133133791331006832121112071207913351
691016924445115151148129779217148888887901-125121111207170798801
701017024853151151147148888013612412488124804842121110070308812440
711017126419216162141161355138112315153513153120104322211180741051315364
72101722161721515213123141431101061255125121437194312111100106.50003.50001435125.50003.5000
731017322829115151146127997207148888889701124121111006070978810
741017423637151151147148888014811511588115603642121110070408811530
75101752184916216214510117117406121081081081172012111110000506011710810
7610176210652161621436513135801231515313515312024122100000301013515320
771017722315515510481191192051010101010101011901124111111004050119101010
78101782323315115114121421421202413313313314210012111200000102014213310
7910179250513151531212414144100366121261261446024112200000103014412620
80101802444915515510689119112151010101010101091101-125112011205150911101001
811018128079165165111215615690947147147141567182311111107105.50003.50001567144.50003.5000
821018222241153153123612612660887117117111264152311111104306212671132
8310183248493161631336613136705108111181181363024112200000305013611820
84101842686716216214910711711411215315371115312088321211140721071115362
851018523221516165111261515690861388138131565174312111100105.50002.50001568134.50002.5000
86101862311152152133612512570481161161161255012111110000304012511610
871018722631165165111215615690241471471471567012111200000102015614710
8810188226271531531212144144100241351351351448012111200000102014413510
89101892282916216214510117117401215315311715312048121100000501011715340
901019027311511511461297972024133133971331004812110000060209713340
91101912641915115114121421421201243133133131421011123111111010107514231365
9210192246471631631361210910910241451451091459048121210000602010914540
931019326465151151147148888011164124128841281-4852110110870748841204
94101942767713213211610787811121231237812390583211011105.50000.500010781234.50000.5000
9510195224252161621471499990151071111799117402442221110070509911720
9610196264633161631361291010910366131361091367036122100000603010913630
9710197222214161641236713137607101199119111372144312201100306113791131
98101982202521616214910117711412441414471114410076322101140722071114452
991019928693161631351081111830245141451181459036122100000502011814530
100101100234351151511448511115601221414211514212036122100000401011514230
So we need participants' ID numbers, a0-b2, and their choices in the format 1 or 2. In the above data frame, those are columns 1, 6, 7, 13, 14, 21, 22, and 36 so let's extract those. Also, they only analyzed the rank-reverse condition (perhaps this is why we couldn't reliably recover parameters) so that is "trail_type" 3.
% Keep only the rank-reverse condition (trail_type == 3 — the column name
% is misspelled in the source CSV) and the eight columns needed for fitting:
% subject ID, a0, b0, a1, b1, a2, b2, and the response key.
trialData = trialData(trialData.trail_type == 3, [1, 6, 7, 13, 14, 21, 22, 36]);
trialData.Properties.VariableNames = {'SubjectID', 'a0', 'b0', 'a1', 'b1', 'a2', 'b2', 'Chose1'};
% Recode the response from {1,2} to {0,1}.
% NOTE(review): this makes Chose1 == 1 correspond to key 2 — confirm that
% key 2 codes "chose option 1" in the task, otherwise the coding is inverted.
trialData.Chose1 = trialData.Chose1 - 1;
trialData.Prob1 = zeros(height(trialData), 1)
trialData = 3704×9 table
 SubjectIDa0b0a1b1a2b2Chose1Prob1
110116514781310
210116151215200
310116315451400
4101516111071400
51011528913400
610151671412910
710111512421400
810121510741300
910115381014400
1010131510851300
1110115161014200
1210131511741410
1310115311781000
1410114213341200
1510115271014300
1610131612741500
1710115211671000
1810121651311700
1910121641412610
201011166119810
2110131651412710
2210116213561200
2310115314451310
2410131611851400
2510131541412610
2610131661311810
2710116391014500
2810115212561110
291011159741200
301011517913310
3110121613531510
3210121531412500
3310116515671400
3410115312671100
3510116271115310
3610151661513800
3710115114231300
381011327812300
3910141671311910
4010121611741400
4110116211781010
4210111511521400
4310116381115410
4410131610961300
451012149741200
4610121410631310
4710116215341410
4810131641513610
4910151612961510
5010131551311700
51101165101115610
5210116171014310
5310121541311600
5410111613421500
5510116281014410
5610121612631510
5710115313561200
5810121661210810
5910116212671110
6010116513891210
6110115214341300
6210116161115210
631011427913300
6410111531311510
6510115213451200
6610121511631410
6710216514781300
6810216151215200
6910216315451410
70102516111071400
711021528913400
7210251671412900
7310211512421410
7410221510741300
7510215381014410
7610231510851310
7710215161014210
7810231511741410
7910215311781000
8010214213341200
8110215271014310
8210231612741510
8310215211671000
8410221651311700
8510221641412600
861021166119800
8710231651412710
8810216213561200
8910215314451300
9010231611851410
9110231541412600
9210231661311800
9310216391014510
9410215212561100
951021159741210
961021517913310
9710221613531510
9810221531412500
9910216515671400
10010215312671100
Now we can define some necessary variables and functions
% Per-subject fitting setup: the subject list, an empty results table, and
% a helper that pulls one subject's trials (dropping the SubjectID column
% so columns 1-6 are a0..b2, as generatePredictions expects).
included_subjects = unique(trialData.SubjectID);
 
subjectData = table();
 
grab_data = @(subject) trialData(trialData.SubjectID == subject, 2:end);
 
function updated = addPredictions(trialData, subject, predictions)
% Return a copy of trialData with the Prob1 column filled in for one
% subject's rows; all other rows are left untouched.
subjectRows = trialData.SubjectID == subject;
trialData.Prob1(subjectRows) = predictions;
updated = trialData;
end
Which allows us to Recover Free Parameters and Define Predicted Decisions
% Fit each subject separately, then store their recovered parameters,
% sum of squared errors, and deviance, and write the fitted probabilities
% back into the master trial table.
for i = 1:length(included_subjects)
df = grab_data(included_subjects(i));
result = optimize(@obj_function, initial_params, lower_bounds, upper_bounds, df);
 
df.Prob1 = generatePredictions(result, df);
model_SS = sum((df.Chose1 - df.Prob1).^2);
% Deviance: -2 x Bernoulli log-likelihood of the observed choices.
model_NLL = -2 * sum(df.Chose1 .* log(df.Prob1) + (1 - df.Chose1) .* log(1 - df.Prob1));
subjectData(i, :) = {included_subjects(i), result(1), result(2), result(3), result(4), result(5), result(6), model_SS, model_NLL};
trialData = addPredictions(trialData, included_subjects(i), df.Prob1);
end
subjectData.Properties.VariableNames = {'subjectID', 'Alpha', 'Delta', 'Rho', 'Beta', 'Epsilon', 'Gamma', 'SS', 'Deviance'}
subjectData = 57×9 table
 subjectIDAlphaDeltaRhoBetaEpsilonGammaSSDeviance
11010.41361.43231.05774.20550.4394-0.051716.121289.9689
21021.27810.08971.30415.71550.1256-0.34477.352248.5749
31031.58950.10320.62477.01360.0157-0.49810.96888.9042
41041.67090.07400.52217.76460.01520.49800.96978.9665
51050.51151.39431.29664.49850.0313-0.49901.875014.9667
61061.69460.07680.54458.05680.1529-0.19358.233954.0180
71070.47451.51791.10494.36730.2273-0.100011.454569.9661
81081.98241.35078.0938e-079.91940.4165-0.191915.510387.4497
91090.43101.45411.05304.42650.3437-0.272712.187571.9739
101101.0132e-060.19292.5954e-064.14370.0577-0.50003.438824.0005
111111.58580.10390.61356.96254.2555e-05-0.00621.4398e-070.0060
121121.63561.67262.1653e-069.44040.2840-0.129613.925480.4270
131131.58580.10390.61356.96254.2555e-05-0.00621.4398e-070.0060
141141.05760.03581.47006.37090.1822-0.33169.659560.1556
151151.57440.10180.62226.86100.1364-0.49986.545538.6772
161161.59720.10260.66776.98380.0909-0.49974.909131.2975
171171.42591.94371.95089.87620.2435-0.060012.009772.2680
181181.68350.07950.47317.87190.1979-0.34219.208356.9726
191191.58730.10380.62697.04310.0152-0.49800.96978.9667
201201.70040.07560.52707.74860.3987-0.108115.117485.4669
211210.72720.06670.13014.11870.3379-0.239912.907375.6418
221221.58570.10390.61346.96324.2555e-05-0.00631.4398e-070.0060
231240.64940.09210.11903.75900.28600.022613.819580.4151
241251.37940.12550.00202.24760.02560.49881.874015.0043
251261.17920.78900.90544.17530.5000-0.250012.000071.9789
261270.66931.20871.09363.99100.5000-0.075816.121289.9744
271281.58900.10500.61627.04370.0455-0.49932.727320.1103
281290.97420.76862.000010.00000.2698-0.283713.441077.5883
291301.59520.09950.57866.53400.0157-0.49810.96888.9041
301311.64930.07400.40947.76470.0909-0.16675.393939.4657
311320.41691.42411.09764.41550.1212-0.37506.484843.0681
321330.70841.20334.5178e-060.23280.2110-0.393713.822580.5423
331340.72601.23391.05534.00470.5000-0.084615.784688.2386
341350.74420.53422.000010.00000.3065-0.243314.014480.4176
351361.66650.07140.54787.74690.1061-0.35715.878840.2556
361380.44291.46701.23194.06190.3571-0.200012.214370.6853
371401.50161.54221.99979.99870.3678-0.005815.137785.6076
381411.99881.71301.99979.99900.4157-0.101115.333785.6393
391420.77241.17491.05314.06790.5000-0.060616.257690.5233
401430.58521.37121.11574.00800.5000-0.115415.384686.6162
411441.58520.10320.62376.97780.0162-0.49810.96778.8396
421450.75591.19490.97634.11780.5000-0.129014.467781.7741
431460.77591.17891.04284.03850.5000-0.136415.272786.5236
441470.62601.25151.18704.50354.8527e-050.02931.6811e-070.0066
451481.60860.10350.64506.88640.0909-0.49974.909131.2975
461491.99970.36355.5472e-049.99862.8567e-05-0.31390.50132.8825
471500.75850.58012.00009.99990.2887-0.318912.439472.0456
481511.61120.06910.45438.10680.2214-0.17749.728659.6184
491520.91365.8932e-071.56627.08970.1727-0.114010.359664.9554
501530.59631.37841.02565.95770.40910.055615.818288.7364
511541.68220.08100.48127.80660.0303-1.1914e-071.939417.9248
521552.00000.36273.8484e-059.99990.04690.17753.347226.6615
531560.62921.25471.0216e-051.65160.0310-0.38742.766421.6691
541570.52122.1390e-061.28951.39130.1660-0.500013.289777.3106
551581.56121.99931.99939.99820.4772-0.066416.223990.3865
561600.38471.41311.06444.19580.15340.09268.409155.4065
571610.43221.37000.87574.67770.4129-0.197214.060680.9767

2.2 Compute Model Fit Index

We will calculate BIC as the model fit index because we are attempting to model the probabilistic nature of the data generation process.
% BIC = deviance + k*ln(n) with k = 6 free parameters and n = 65 trials
% per subject (3704 rank-reverse trials / 57 subjects).
% NOTE(review): n is hard-coded — confirm every subject contributes exactly
% 65 trials, otherwise compute n per subject.
subjectData.BIC = subjectData.Deviance + log(65) * 6;

2.3 Identify the Best Model

We need to define new objective functions for each model. Since each model uses the same utility function, but holds some variables constant (at 0), we only really need to modify the number of parameter inputs and set the constant values to 0. We can also use a list of indices so that we can use the same function to generate predictions down the line.
function f = of_alphaOnly(params, df, optimMethod)
% Objective for the inequality-only model: delta and rho fixed at 0.
% params = [alpha, beta, epsilon, gamma], mapped into slots [1, 4:6]
% of the full 6-parameter vector. OLS gives sum of squared errors;
% MLE (default) gives the Bernoulli negative log-likelihood.
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new([1, 4:6]) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_deltaOnly(params, df, optimMethod)
% Objective for the harm-only model: alpha and rho fixed at 0.
% params = [delta, beta, epsilon, gamma], mapped into slots [2, 4:6].
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new([2, 4:6]) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_rhoOnly(params, df, optimMethod)
% Objective for the rank-reversal-only model: alpha and delta fixed at 0.
% params = [rho, beta, epsilon, gamma], mapped into slots 3:6.
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new(3:6) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_ad(params, df, optimMethod)
% Objective for the inequality + harm model: rho fixed at 0.
% params = [alpha, delta, beta, epsilon, gamma], slots [1:2, 4:6].
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new([1:2, 4:6]) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_ar(params, df, optimMethod)
% Objective for the inequality + rank-reversal model: delta fixed at 0.
% params = [alpha, rho, beta, epsilon, gamma], slots [1, 3:6].
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new([1, 3:6]) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_dr(params, df, optimMethod)
% Objective for the harm + rank-reversal model: alpha fixed at 0.
% params = [delta, rho, beta, epsilon, gamma], slots 2:6.
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new(2:6) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_noEpsilon(params, df, optimMethod)
% Objective for the no-lapse model: epsilon and gamma fixed at 0.
% params = [alpha, delta, rho, beta], slots 1:4. With epsilon = 0 the
% gamma term (gamma * 2 * epsilon) drops out of probability() as well.
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new(1:4) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_noGamma(params, df, optimMethod)
% Objective for the unbiased-lapse model: gamma fixed at 0.
% params = [alpha, delta, rho, beta, epsilon], slots 1:5.
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new(1:5) = params;
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
if strcmp(optimMethod, 'OLS')
f = sum((Chose1 - Prob1).^2);
elseif strcmp(optimMethod, 'MLE')
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
end
end
 
function f = of_GammaOnly(params, df, optimMethod)
% OF_GAMMAONLY Objective for the gamma-only model.
%   params      - a single free parameter, gamma (slot 6 of the full vector
%                 [alpha, delta, rho, beta, epsilon, gamma]); epsilon is
%                 pinned at 0.5 and all other slots at 0
%   df          - trial table; must contain Chose1 plus the columns
%                 generatePredictions reads
%   optimMethod - 'OLS' (sum of squared errors) or 'MLE' (negative
%                 log-likelihood, the default)
if nargin < 3
optimMethod = 'MLE';
end
params_new = zeros(1, 6);
params_new(5) = 0.5;    % epsilon fixed at 0.5 for this model
params_new(6) = params; % gamma is the only free parameter
Prob1 = generatePredictions(params_new, df);
Chose1 = df.Chose1;
switch optimMethod
case 'OLS'
f = sum((Chose1 - Prob1).^2);
case 'MLE'
% Prob1 values of exactly 0 or 1 make the log term infinite.
f = -sum(Chose1 .* log(Prob1) + (1 - Chose1) .* log(1 - Prob1));
otherwise
% Fail loudly instead of leaving f unassigned.
error('of_GammaOnly:badMethod', 'Unknown optimMethod: %s', optimMethod);
end
end
 
% Objective-function handles for the nine alternative models, and the slots
% of the full parameter vector [alpha, delta, rho, beta, epsilon, gamma]
% that each model fits as free parameters (of_GammaOnly additionally pins
% epsilon at 0.5 inside the objective).
ofs = {@of_alphaOnly, @of_deltaOnly, @of_rhoOnly, @of_ad, @of_ar, @of_dr, @of_noEpsilon, @of_noGamma, @of_GammaOnly};
idxs = {[1, 4:6], [2, 4:6], 3:6, [1:2, 4:6], [1, 3:6], 2:6, 1:4, 1:5, 6};
And now we can preallocate the predictions for each model and the new data frame
% Subject-level results table, filled in by the recovery loop below.
altSubjectData = table();
 
% Copy the trial data and swap the single Prob1 column for one
% prediction column per alternative model.
altTrialData = trialData;
altTrialData.Prob1 = []; % Remove the 9th column
predictionCols = {'alphaOnly_Prob1', 'deltaOnly_Prob1', 'rhoOnly_Prob1', ...
'ad_Prob1', 'ar_Prob1', 'dr_Prob1', ...
'noEpsilon_Prob1', 'noGamma_Prob1', 'gammaOnly_Prob1'};
for c = 1:numel(predictionCols)
altTrialData.(predictionCols{c}) = zeros(height(altTrialData), 1);
end
Now let's recover the free parameters and generate predictions for each of these alternative models
% Fit all nine alternative models to each subject, storing the recovered
% parameters plus fit statistics per subject and the trial-level predictions.
for i = 1:length(included_subjects)
df = grab_data(included_subjects(i));
% 37 free parameters across the nine models, plus SS and deviance for each (2 * 9).
numParams = sum(cellfun(@length, idxs)) + 2 * 9;
outputs = zeros(1, numParams);
j = 0; % write cursor into outputs
 
for k = 1:length(idxs)
idx = idxs{k}; % slots of the full parameter vector this model fits
initials = initial_params(idx);
uppers = upper_bounds(idx);
lowers = lower_bounds(idx);
of = ofs{k};
 
result = optimize(of, initials, lowers, uppers, df);
 
% Re-expand to the full 6-slot vector; fixed slots stay 0.
% NOTE(review): for k == 9 (gammaOnly) the objective pins epsilon
% (slot 5) at 0.5, but pars leaves it 0 here, so these predictions and
% the recorded SS/deviance may not match the fitted objective — confirm.
pars = zeros(1, 6);
pars(idx) = result;
df.Prob1 = generatePredictions(pars, df);
 
model_SS = sum((df.Chose1 - df.Prob1).^2);
% Deviance: -2 * log-likelihood of the observed choices.
model_NLL = -2 * sum(df.Chose1 .* log(df.Prob1) + (1 - df.Chose1) .* log(1 - df.Prob1));
% Append [free params, SS, deviance] for this model and advance the cursor.
outputs((j+1):(j+2+length(result))) = [result, model_SS, model_NLL];
j = j + 2 + length(result);
 
% Columns 9:17 of altTrialData hold one prediction column per model.
altTrialData{altTrialData.SubjectID == included_subjects(i), (8+k)} = df.Prob1;
end
% Column 1 is the subject ID; columns 2:56 are the per-model outputs.
altSubjectData(i, 1:56) = array2table([included_subjects(i), outputs]);
end
 
% One group of names per model: its free parameters, then SS and deviance.
altSubjectData.Properties.VariableNames = {'SubjectID', 'Alpha_M1', 'Beta_M1', 'Epsilon_M1', 'Gamma_M1', 'SS_M1', 'Deviance_M1', ...
'Delta_M2', 'Beta_M2', 'Epsilon_M2', 'Gamma_M2', 'SS_M2', 'Deviance_M2', ...
'Rho_M3', 'Beta_M3', 'Epsilon_M3', 'Gamma_M3', 'SS_M3', 'Deviance_M3', ...
'Alpha_M4', 'Delta_M4', 'Beta_M4', 'Epsilon_M4', 'Gamma_M4', 'SS_M4', 'Deviance_M4', ...
'Alpha_M5', 'Rho_M5', 'Beta_M5', 'Epsilon_M5', 'Gamma_M5', 'SS_M5', 'Deviance_M5', ...
'Delta_M6', 'Rho_M6', 'Beta_M6', 'Epsilon_M6', 'Gamma_M6', 'SS_M6', 'Deviance_M6', ...
'Alpha_M7', 'Delta_M7', 'Rho_M7', 'Beta_M7', 'SS_M7', 'Deviance_M7', ...
'Alpha_M8', 'Delta_M8', 'Rho_M8', 'Beta_M8', 'Epsilon_M8', 'SS_M8', 'Deviance_M8', ...
'Gamma_M9', 'SS_M9', 'Deviance_M9'};
 
% Coerce every column after SubjectID to double; the missing semicolon is
% deliberate so the resulting table is displayed.
altSubjectData(:, 2:end) = varfun(@double, altSubjectData(:, 2:end))
altSubjectData = 57×56 table
 SubjectIDAlpha_M1Beta_M1Epsilon_M1Gamma_M1SS_M1Deviance_M1Delta_M2Beta_M2Epsilon_M2Gamma_M2SS_M2Deviance_M2Rho_M3Beta_M3Epsilon_M3Gamma_M3SS_M3Deviance_M3Alpha_M4Delta_M4Beta_M4Epsilon_M4Gamma_M4SS_M4Deviance_M4Alpha_M5Rho_M5Beta_M5Epsilon_M5Gamma_M5SS_M5Deviance_M5Delta_M6Rho_M6Beta_M6Epsilon_M6Gamma_M6SS_M6Deviance_M6Alpha_M7Delta_M7Rho_M7Beta_M7SS_M7Deviance_M7Alpha_M8Delta_M8Rho_M8Beta_M8Epsilon_M8SS_M8Deviance_M8Gamma_M9SS_M9Deviance_M9
11010.99964.02040.5000-0.045516.363690.94921.17554.07660.4394-0.051716.121289.96890.92393.96470.4362-0.052116.121289.96890.23451.45384.34290.4394-0.051716.121289.96890.98761.00594.01660.5000-0.045516.363690.94921.00001.00004.01840.4394-0.051716.121289.96895.2475e-072.4427e-071.34790.180716.257690.52330.44121.45971.06914.11160.439416.257690.5233-0.045516.500091.4954
21021.62954.36510.1364-0.27787.393949.19541.00004.03810.5000-0.075816.121289.97440.19163.58480.5000-0.075816.121289.97441.15130.16513.12180.1256-0.34447.352348.57291.47840.41835.19710.1364-0.27787.393949.19541.00001.00004.03810.5000-0.075816.121289.97441.38390.18704.5093e-042.11358.8712284.20211.60390.08580.39617.62610.13647.772752.5765-0.075816.500091.4954
31031.36984.17020.0157-0.49790.96888.90470.99994.02690.5000-0.023116.215489.97060.10243.41580.5000-0.023116.215489.97061.48620.13286.65320.0157-0.49810.96888.90411.33810.81346.31710.0157-0.49780.96888.90461.00001.00004.02690.5000-0.023116.215489.97061.54800.07280.47745.00981.000046.05311.67140.07920.53777.74960.01540.984610.3333-0.023116.250090.1091
41040.51142.77833.2654e-06-0.13730.94477.85350.99994.01630.50000.015216.484991.43480.02371.46940.49740.015216.487791.44641.59140.09897.50940.01520.49800.96978.96650.87761.05864.06825.0665e-050.05210.94447.73051.00001.00004.01630.50000.015216.484991.43481.16630.00630.91742.09080.94867.82161.64180.06910.56217.50300.01520.984810.36410.015216.500091.4954
51050.99054.02250.5000-0.031215.937588.47270.18204.08230.0208-0.50001.800614.80221.50687.55550.0313-0.49911.875014.96714.0444e-061.03250.71970.0208-0.50001.800614.80220.02781.82049.01390.0313-0.49901.875014.96670.20373.2526e-063.64860.0208-0.50001.800614.80221.8653e-060.74611.0950e-050.85981.841418.83930.47951.38701.10754.41810.03131.937517.7998-0.03121688.7228
61061.32126.60750.1529-0.19358.233954.01801.00004.02600.5000-0.053816.061589.35388.4391e-043.34990.4994-0.053916.061689.35401.61090.09867.58990.1529-0.19358.233954.01801.61530.60998.01100.1529-0.19358.233954.01801.00001.00004.02600.5000-0.053816.061589.35381.65020.08550.59054.777110.0000473.74531.70220.07400.51348.00400.15388.461555.8120-0.053816.250090.1091
71070.99284.02490.5000-0.045516.363690.94921.37874.24270.2273-0.100011.454569.96611.01754.03780.2182-0.104211.454569.96610.28271.61904.05340.2273-0.100011.454569.96610.01711.72088.26530.2273-0.100011.454569.96611.15381.05134.15270.2273-0.100011.454569.96615.0894e-083.1019e-070.31823.845911.590970.74670.37091.35231.24304.26900.227311.590970.7467-0.045516.500091.4954
81081.00114.06230.5000-0.106115.757688.50310.01173.63710.1054-0.499715.524387.55550.95294.76010.4690-0.113115.697088.24900.79530.65731.39500.3935-0.204015.495787.46191.01390.99284.01360.5000-0.106115.757688.50311.00001.00004.02470.4697-0.112915.697088.24902.0441e-061.33451.0884e-050.025716.257390.51921.41880.29560.82114.15540.500016.500091.4954-0.106116.500091.4954
91090.98494.06620.5000-0.187513.750079.49911.16724.07600.3437-0.272712.187571.97390.99574.98790.3416-0.274512.187571.97390.33951.61474.46530.3437-0.272712.187571.97390.23871.99999.99960.3209-0.327712.105371.35911.00001.00004.00470.3437-0.272712.187571.97396.3150e-090.00950.05755.277614.404782.22800.28921.31991.05114.18610.343714.437582.3669-0.18751688.7228
101100.98974.01580.5000-0.064515.241984.91510.26423.02560.0577-0.50003.438824.00051.49757.48730.0645-0.49953.483923.84605.4358e-061.03500.77230.0577-0.50003.438824.00050.02941.81678.98770.0645-0.49953.483923.84571.01891.3708e-050.78450.0577-0.50003.438824.00054.1573e-070.55173.1257e-050.91533.732532.30450.55951.31541.07185.88120.06453.741929.6629-0.064515.500085.9503
111111.11495.57435.4500e-05-0.00342.0413e-070.00730.99993.98210.50001.4217e-0816.500091.49540.03433.44610.4991-7.3164e-0616.503791.51021.49070.13526.67394.2872e-05-0.01111.3984e-070.00601.33170.81956.28154.7994e-05-0.00871.6948e-070.00671.00001.00003.99000.50003.4442e-0916.500091.49541.58100.09890.62686.40282.4791e-088.0190e-041.58370.10300.62346.97334.2490e-051.4420e-070.0060016.500091.4954
121120.99104.01480.50000.023116.215489.97060.08971.24210.02690.499814.181181.51380.94073.99620.33030.038714.511483.01131.62651.66339.48700.2840-0.129613.925480.42700.25301.99999.99960.2865-0.045314.060681.06580.20321.3894e-070.54820.02690.500014.181181.51380.71541.15870.16620.154013.981480.73361.26111.33434.5506e-066.46420.319714.196481.70870.023116.250090.1091
131131.11495.57435.4500e-05-0.00342.0413e-070.00730.99993.98210.50001.4217e-0816.500091.49540.03433.44610.4991-7.3164e-0616.503791.51021.49070.13526.67394.2872e-05-0.01111.3984e-070.00601.33170.81956.28154.7994e-05-0.00871.6948e-070.00671.00001.00003.99000.50003.4442e-0916.500091.49541.58100.09890.62686.40282.4791e-088.0190e-041.58370.10300.62346.97334.2490e-051.4420e-070.0060016.500091.4954
141140.33433.83200.1835-0.32449.681260.39351.00004.08750.5000-0.106115.757688.50311.8101e-072.61610.2888-0.183615.757688.50311.07940.12151.95240.1824-0.33059.660360.16630.91230.56662.02550.1841-0.32259.680760.39041.00001.00004.08750.5000-0.106115.757688.50311.33775.1408e-073.8893e-060.229712.450479.73451.70540.07040.57667.62460.197010.439465.4948-0.106116.500091.4954
151151.11715.58510.1364-0.49976.545538.67751.00004.11590.5000-0.136415.272786.52363.6313e-075.49070.4574-0.149115.272786.52361.47490.13446.64210.1364-0.49986.545538.67711.30100.60525.66580.1364-0.49986.545538.67761.00001.00004.11600.5000-0.136415.272786.52361.58820.09860.64506.51179.0000414.46601.68600.07000.54648.00860.13647.772752.5765-0.136416.500091.4954
161161.12265.61290.0909-0.49964.909131.29781.00004.08180.5000-0.090915.954589.30145.8531e-043.07480.4961-0.091615.954789.30221.47510.13276.64710.0909-0.49974.909131.29741.29900.66426.01690.0909-0.49974.909131.29741.00001.00004.08190.5000-0.090915.954589.30141.54600.06110.36074.41326.0000276.31201.70560.07620.53787.96710.09095.454540.2120-0.090916.500091.4954
171170.99324.01520.5000-0.023116.215489.97061.36714.23840.2457-0.054912.014272.29340.98254.01500.2357-0.057312.014272.29340.24641.34795.05950.2457-0.054912.014272.29340.25282.000010.00000.1623-0.339810.927665.69191.09331.03344.10260.2457-0.054912.014272.29344.5155e-074.2333e-071.35260.827412.061572.54910.41631.46021.04364.15100.246212.061572.5491-0.023116.250090.1091
181181.68754.25750.1979-0.34219.208356.97261.00004.09720.5000-0.130815.138585.61091.1343e-073.17830.2174-0.300815.138585.61091.59120.09377.42900.1979-0.34219.208356.97261.49230.75126.95520.1979-0.34219.208356.97261.00001.00004.09720.5000-0.130815.138585.61091.35124.1967e-071.7084e-050.970212.9343233.44831.69510.06890.48637.93530.200010.400065.0523-0.130816.250090.1091
191191.34724.16920.0152-0.49770.96978.96740.99994.01630.5000-0.015216.484991.43480.02301.48730.4974-0.015216.487791.44641.49340.13516.68850.0152-0.49800.96978.96671.60100.67624.20360.0152-0.49790.96978.96741.00001.00004.01630.5000-0.015216.484991.43481.56660.08830.48645.58501.000046.05261.66050.08130.50877.78680.01520.984810.3641-0.015216.500091.4954
201201.77794.23650.3987-0.108115.117485.46691.00004.08990.5000-0.084615.784688.23860.04173.57510.4998-0.084715.784888.23941.25020.76773.86690.5000-0.084615.784688.23861.75630.25684.85300.3987-0.108115.117485.46691.00001.00004.08990.5000-0.084615.784688.23861.04340.52970.55343.3489e-0816.250090.10911.23890.67920.98153.79330.500016.250090.1091-0.084616.250090.1091
211211.57604.20960.3409-0.233312.909175.65461.00004.05930.5000-0.161514.553883.20135.2627e-083.55980.2926-0.276014.553883.20131.12210.11332.39050.3376-0.240512.907375.64171.53350.81036.91860.3409-0.233312.909175.65461.00001.00004.05940.5000-0.161514.553883.20130.02788.1461e-096.3605e-083.102815.478187.02110.34181.61161.04744.02460.500016.250090.1091-0.161516.250090.1091
221221.11495.57425.4500e-05-0.00342.0413e-070.00730.99993.98210.50001.3140e-0816.500091.49540.03423.44540.4990-7.7564e-0616.503791.51021.49060.13526.67384.2872e-05-0.01111.3984e-070.00601.33200.81966.27994.7989e-05-0.00841.6945e-070.00671.00001.00003.98910.50009.3726e-0916.500091.49551.58100.09890.62686.40282.4791e-088.0190e-041.58470.10250.63066.97474.2481e-051.4425e-070.0060016.500091.4954
231241.24974.08800.30300.050013.878880.68281.00004.03400.50000.030316.439491.25299.8594e-043.45090.49910.030416.439491.25301.07220.16722.04520.28550.022213.819480.41511.40590.79614.09250.30300.050013.878880.68281.00001.00004.03410.50000.030316.439491.25291.36895.2492e-064.0752e-060.120713.953280.94031.34810.21992.5344e-041.49820.281513.826780.45170.030316.500091.4954
241251.66264.48970.03030.49901.878815.09380.99994.02670.50000.030316.439491.25290.10193.41580.50000.030316.439491.25291.12920.10282.74370.02560.49881.874015.00431.47560.73507.04590.03030.49891.878815.09431.00001.00004.02670.50000.030316.439491.25290.62300.05120.12593.73661.921021.45111.35950.12361.5615e-041.68980.01781.904717.00560.030316.500091.4954
251260.32723.87010.3935-0.319311.335268.35711.00004.07690.5000-0.250012.000071.97890.09902.87930.4680-0.267312.060172.29971.11860.87484.11860.5000-0.250012.000071.97890.86901.51857.88740.3892-0.328711.330168.32471.00001.00004.07690.5000-0.250012.000071.97891.35111.7080e-061.4988e-050.006815.990988.68650.49291.20890.85433.98810.500016.000088.7228-0.25001688.7228
261271.31996.59760.3788-0.100015.151585.95971.00004.11080.5000-0.075816.121289.97442.1886e-043.50740.4997-0.075816.121289.97441.60110.09227.59400.3788-0.100015.151585.95971.61270.43027.27170.3788-0.100015.151585.95971.00001.00004.11090.5000-0.075816.121289.97441.34104.7227e-074.3497e-060.019916.420691.18070.79171.05371.00634.00900.500016.500091.4954-0.075816.500091.4954
271281.11055.47540.0455-0.49922.727320.11090.99994.02590.5000-0.045516.363690.94920.00593.26320.4997-0.045516.363890.95001.53740.14346.63910.0455-0.49932.727320.11021.37310.53265.06210.0455-0.49932.727320.11081.00001.00004.02590.5000-0.045516.363690.94921.60760.07100.54594.91573.0000138.15631.68390.07640.50027.88190.04552.863624.4078-0.045516.500091.4954
281291.00154.08340.5000-0.053816.061589.35381.27754.13030.3546-0.072814.688483.71480.94744.00540.3480-0.074214.688483.71481.84212.000010.00000.3068-0.228214.118980.85100.93051.03494.00880.5000-0.053816.061589.35381.00001.00004.03830.3546-0.072814.688483.71480.12140.37341.16350.177715.065485.35510.44760.21302.00009.99980.316514.268782.0730-0.053816.250090.1091
291301.36854.17800.0157-0.49780.96888.90460.99994.01630.5000-0.015615.984488.66030.04693.33880.5000-0.015615.984688.66111.48060.13986.96810.0157-0.49810.96888.90411.61250.67014.20980.0157-0.49790.96888.90481.00001.00004.01630.5000-0.015615.984488.66031.57790.10420.64686.88971.000046.05241.66360.07720.52477.66330.01560.984410.3021-0.01561688.7228
301311.76534.67470.0909-0.16675.393939.46570.99994.02760.5000-0.030316.439491.25290.01682.80240.4973-0.030716.442891.26641.59880.09427.51680.0909-0.16675.393939.46571.52930.65937.41290.0909-0.16675.393939.46571.00001.00004.02760.5000-0.030316.439491.25291.60060.08630.66605.94016.0000285.52141.66300.07420.48178.04970.09095.454540.2120-0.030316.500091.4954
311320.99654.08070.5000-0.090915.954589.30140.34693.93800.1201-0.37586.482243.06531.16495.51480.1200-0.37886.484843.06810.48031.46816.73190.1212-0.37506.484843.06810.22111.99949.99700.1061-0.50006.443841.34171.00001.00004.09950.1212-0.37506.484843.06812.5178e-081.1479e-070.43314.57427.030348.75200.57911.49150.98045.35040.12127.030348.7520-0.090916.500091.4954
321330.99724.01120.5000-0.136415.272786.52360.04762.46690.1340-0.500013.907680.52460.89993.98210.3719-0.183414.303082.27200.48540.82460.33970.2110-0.393713.822580.54230.01581.74458.47150.3788-0.180014.303082.27210.08673.4871e-081.35380.1340-0.500013.907680.52467.4038e-071.33866.4444e-060.063415.098585.76324.6123e-061.33424.0143e-050.06378.3208e-0515.098585.7632-0.136416.500091.4954
331340.27843.78970.3565-0.134614.620183.33391.00003.97570.5000-0.084615.784688.23860.00133.48110.4952-0.085515.784888.23940.30470.02154.16170.3549-0.137714.617783.32000.86931.67318.82580.3344-0.179614.539082.93741.00001.00003.97570.5000-0.084615.784688.23861.35594.9393e-074.6904e-060.034316.023289.22670.73501.15870.91224.03840.500016.250090.1091-0.084616.250090.1091
341351.00194.08120.5000-0.069215.938588.85901.27894.11990.3835-0.092615.056885.22410.96844.03890.3788-0.093815.056885.22411.50171.99609.98670.3549-0.149914.762783.81640.86941.06633.99690.5000-0.069215.938588.85901.10461.03524.09000.3835-0.092615.056885.22412.0210e-073.5272e-071.28420.366015.384686.61620.33100.10871.99989.99900.362915.221885.9427-0.069216.250090.1091
351361.30066.65210.1061-0.35715.878840.25561.00004.03770.5000-0.075816.121289.97440.19013.48700.5000-0.075816.121289.97441.58230.09757.43610.1061-0.35715.878840.25561.52400.31116.03260.1061-0.35715.878840.25561.00001.00004.03770.5000-0.075816.121289.97441.53070.08040.39595.19667.0000326.96821.73300.05770.66067.61530.10616.257644.6423-0.075816.500091.4954
361380.99884.01060.5000-0.137913.396675.93351.32804.10720.3571-0.200012.214370.68530.92783.98780.3499-0.204112.214370.68530.33351.58394.51840.3571-0.200012.214370.68530.90861.04514.00160.5000-0.137913.396675.93351.00001.00004.02130.3571-0.200012.214370.68532.6759e-094.9825e-091.34860.420013.396675.93350.49391.55461.10104.06850.362113.396675.9335-0.137914.500080.4051
371400.99854.02330.50000.038516.153889.72411.37934.15350.40060.046115.511487.12190.88533.96920.39450.046815.511487.12190.80871.31106.22110.39520.040015.491287.04080.65201.17023.97870.50000.038516.153889.72411.25261.05954.14280.40060.046115.511487.12190.06773.0698e-071.34220.398315.587187.43911.50331.54521.99979.99870.369215.138885.61210.038516.250090.1091
381411.30416.51540.4768-0.041215.616986.80301.00004.01920.5000-0.039715.650886.93930.01243.59950.4888-0.040615.651586.94221.84461.99989.99910.4265-0.086215.382885.84581.47200.59496.95540.4768-0.041215.616986.80301.00001.00004.01920.5000-0.039715.650886.93930.81820.24850.89585.0677e-0715.750087.33661.68310.07290.51117.45090.476215.714387.1936-0.039715.750087.3365
391421.30286.51490.4242-0.071415.878888.97921.00004.05140.5000-0.060616.257690.52330.04481.74380.4684-0.064816.270090.57391.57430.09007.24950.4242-0.071415.878888.97921.40400.58117.42040.4242-0.071415.878888.97921.000014.05140.5000-0.060616.257690.52331.33051.0211e-061.1015e-050.019916.421191.18060.69111.07011.06754.00150.500016.500091.4954-0.060616.500091.4954
401431.30486.55310.2633-0.212211.742470.41171.00004.08100.5000-0.115415.384686.61625.3342e-081.99970.2549-0.226315.384686.61621.59820.09407.67070.2633-0.212211.742470.41171.89680.52884.89410.2633-0.212211.742470.41171.00001.00004.08100.5000-0.115415.384686.61620.04506.3723e-094.8005e-083.387714.229882.13170.36621.53651.14054.00270.500016.250090.1092-0.115416.250090.1091
411441.36534.16870.0162-0.49790.96778.84010.99994.02700.5000-0.023815.714387.19360.01592.74260.4974-0.024715.717887.20761.49620.13526.63200.0162-0.49810.96778.83951.60520.67894.20840.0162-0.49800.96778.84031.00001.00004.02700.5000-0.023815.714387.19361.59730.08040.58984.98271.000046.05371.66490.08060.51587.72450.01590.984110.2703-0.023815.750087.3365
421450.15553.78590.4753-0.139414.442981.66571.00004.03030.5000-0.129014.467781.77410.79243.72150.4981-0.129314.471381.78941.84411.78139.53240.4296-0.192614.248280.79150.69631.99969.99820.4449-0.178914.336581.19591.00001.00004.03030.5000-0.129014.467781.77410.93830.02350.17260.003815.517386.01940.79101.26911.04304.29080.500015.500085.9503-0.129015.500085.9503
431461.26396.27200.4697-0.145215.212186.26151.00004.01580.5000-0.136415.272786.52360.00623.54850.4965-0.137315.272986.52431.62591.66098.96480.4761-0.154715.244986.40501.42800.65116.91700.4697-0.145215.212186.26151.00001.00004.01580.5000-0.136415.272786.52360.76750.60070.96047.2121e-0716.500091.49551.65500.08150.51417.80160.469716.439491.2529-0.136416.500091.4954
441470.98624.01130.5000-0.007716.246290.09381.01334.99608.5216e-059.3067e-044.7218e-070.01111.52657.63464.5476e-05-0.00271.9064e-070.00700.56351.33525.83444.3832e-05-0.00211.3269e-070.00580.05211.69388.22763.9041e-050.03231.6941e-070.00651.00021.00014.99688.5243e-050.00144.7237e-070.01110.64021.28840.71744.85621.8534e-087.2232e-040.59311.26911.06704.34084.3729e-051.3274e-070.0059-0.007716.250090.1091
451481.34554.20340.0909-0.49964.909131.29801.00004.08180.5000-0.090915.954589.30145.8531e-043.07480.4961-0.091615.954789.30221.50160.13726.68830.0909-0.49974.909131.29741.29430.66526.04900.0909-0.49974.909131.29741.00001.00004.08190.5000-0.090915.954589.30141.62050.08680.52705.22786.0000276.31131.69540.07260.49207.90970.09095.454540.2120-0.090916.500091.4954
461490.51592.75403.2517e-06-0.13370.94477.85350.99994.01630.50000.015216.484991.43480.02331.48390.49740.015216.487791.44641.99970.36359.99862.8582e-05-0.31410.50132.88250.87761.05864.06825.0613e-050.05210.94447.73051.00001.00004.01630.50000.015216.484991.43481.99970.36355.5497e-049.99860.50132.87881.99990.36351.1126e-049.99972.5650e-060.50132.87900.015216.500091.4954
471500.99864.01570.5000-0.112914.709782.76151.30924.15620.3260-0.164512.835474.67450.91123.99510.3167-0.169412.835474.67450.50200.85573.93000.3174-0.181012.807174.44960.90561.04664.00110.5000-0.112914.709782.76151.00001.00004.02520.3260-0.164512.835474.67455.1432e-071.34221.4554e-050.087213.243876.59460.58441.29761.11686.10300.322613.548477.9711-0.112915.500085.9503
481511.34196.68100.2214-0.17749.728659.61840.99994.01960.5000-0.069014.224179.29810.20813.52290.5000-0.069014.224179.29811.57870.08517.47170.2214-0.17749.728659.61841.54400.64807.22280.2214-0.17749.728659.61841.00001.00004.01970.5000-0.069014.224179.29810.34071.1267e-073.7153e-070.634011.363967.82361.66710.07240.47118.14090.224110.086261.7231-0.069014.500080.4051
491520.28073.86460.1759-0.098210.378765.07760.99994.01670.5000-0.015216.484991.43480.07533.46030.5000-0.015116.485091.43551.54000.08897.36380.1970-0.038510.424265.39890.90521.57747.94160.1727-0.114010.359664.95541.00001.00004.01670.5000-0.015216.484991.43481.34988.4464e-076.7136e-060.241311.357372.98720.95592.6209e-071.53147.44750.187510.423665.3969-0.015216.500091.4954
501530.99854.02270.50000.045516.363690.94921.03324.98780.40910.055615.818288.73640.93333.97850.40450.056215.818288.73640.46231.43666.39940.40910.055615.818288.73640.80661.10143.99260.50000.045516.363690.94920.99880.99915.00010.40910.055615.818288.73643.7629e-082.3571e-090.10643.455015.954589.30140.52631.50881.13914.35970.409115.954589.30140.045516.500091.4954
511541.32006.60390.0303-2.5112e-071.939417.92480.99993.98480.50001.1207e-0816.500091.49540.04503.48780.4992-2.8185e-0516.503791.51021.58540.09547.40020.03031.1926e-071.939417.92481.52640.68677.30450.0303-2.5745e-061.939417.92481.00001.00003.98240.5000-8.7941e-0916.500091.49541.56920.08820.49105.58342.000096.70941.67800.07730.51247.83430.03031.939417.9248016.500091.4954
521551.29746.48830.06060.25003.697029.06840.99994.02710.50000.030316.439491.25290.01512.58550.49710.030916.442791.26622.00000.362710.00000.04690.17753.347226.66151.53220.69757.34060.06060.25003.697029.06841.00001.00004.02710.50000.030316.439491.25292.00000.36352.7837e-059.99993.5013150.24412.00000.36344.4299e-059.99990.04633.364827.09140.030316.500091.4954
531560.98634.01150.5000-0.015216.484991.43481.33584.18760.0455-0.16672.848524.05211.24064.36730.0414-0.18302.848524.05210.48710.97132.13340.0310-0.38742.766421.66900.01791.77168.66530.0455-0.16672.848524.05211.00001.00004.06680.0455-0.16672.848524.05210.60211.28221.3695e-051.27032.860852.66510.61551.27151.0495e-051.62290.03372.801023.5251-0.015216.500091.4954
541570.15541.48750.1110-0.500013.572778.74011.00004.05180.5000-0.053816.061589.35380.01453.28780.4932-0.054616.065289.36870.94270.26190.43320.1366-0.500013.348677.70720.39870.98651.81860.1660-0.500013.289777.31061.00001.00004.05180.5000-0.053816.061589.35381.33832.4066e-063.4647e-060.104914.404182.64562.00000.36137.6701e-0610.00000.317714.161381.5627-0.053816.250090.1091
551581.00384.05270.5000-0.060616.257690.52340.42013.88530.4848-0.062516.242490.46160.93994.74120.4845-0.062516.242490.46180.52331.46236.30150.4848-0.062516.242490.46181.10200.93884.05160.5000-0.060616.257690.52331.00001.00004.04990.4848-0.062516.242490.46180.77821.00711.20645.2343e-0816.500091.49540.51551.34961.11184.17440.484816.484891.4348-0.060616.500091.4954
561600.98904.01100.50000.023116.215489.97061.33484.12840.15340.09268.409155.40651.07744.05970.14460.09838.409155.40650.45241.46046.41630.15340.09268.409155.40650.21921.99999.99940.14130.05768.384355.24181.00001.00004.03580.15340.09268.409155.40650.00993.3245e-080.38535.06748.450555.74430.50881.55330.99814.50660.15388.461555.81200.023116.250090.1091
571610.99334.07410.5000-0.161514.553883.20131.20484.10660.4129-0.197214.060680.97670.92373.99030.4084-0.199414.060680.97670.34901.58334.74390.4129-0.197214.060680.97670.01971.80938.95450.4129-0.197214.060680.97671.13810.98754.06850.4129-0.197214.060680.97676.9204e-081.6940e-071.34330.254415.784688.23860.47251.42551.07894.45430.415415.784688.2386-0.161516.250090.1091
Let's glance at the trial level data for these alternative models.
altTrialData
altTrialData = 3704×17 table
 SubjectIDa0b0a1b1a2b2Chose1alphaOnly_Prob1deltaOnly_Prob1rhoOnly_Prob1ad_Prob1ar_Prob1dr_Prob1noEpsilon_Prob1noGamma_Prob1gammaOnly_Prob1
110116514781310.45450.51520.51520.51520.45450.51520.56060.56060.5000
210116151215200.45450.39390.39390.39390.45450.39390.43940.43940.5000
310116315451400.45450.51520.51520.51520.45450.51520.56060.56060.5000
4101516111071400.45450.39390.39390.39390.45450.39390.43940.43940.5000
51011528913400.45450.39390.39390.39390.45450.39390.43940.43940.5000
610151671412910.45450.51520.51520.51520.45450.51520.56060.56060.5000
710111512421400.45450.39390.39390.39390.45450.39390.43940.43940.5000
810121510741300.45450.39390.39390.39390.45450.39390.43940.43940.5000
910115381014400.45450.39390.39390.39390.45450.39390.43940.43940.5000
1010131510851300.45450.39390.39390.39390.45450.39390.43940.43940.5000
1110115161014200.45450.39390.39390.39390.45450.39390.43940.43940.5000
1210131511741410.45450.39390.39390.39390.45450.39390.43940.43940.5000
1310115311781000.45450.51520.51520.51520.45450.51520.56060.56060.5000
1410114213341200.45450.51520.51520.51520.45450.51520.56060.56060.5000
1510115271014300.45450.39390.39390.39390.45450.39390.43940.43940.5000
1610131612741500.45450.39390.39390.39390.45450.39390.43940.43940.5000
1710115211671000.45450.51520.51520.51520.45450.51520.56060.56060.5000
1810121651311700.45450.51520.51520.51520.45450.51520.56060.56060.5000
1910121641412610.45450.51520.51520.51520.45450.51520.56060.56060.5000
201011166119810.45450.51520.51520.51520.45450.51520.56060.56060.5000
2110131651412710.45450.51520.51520.51520.45450.51520.56060.56060.5000
2210116213561200.45450.51520.51520.51520.45450.51520.56060.56060.5000
2310115314451310.45450.51520.51520.51520.45450.51520.56060.56060.5000
2410131611851400.45450.39390.39390.39390.45450.39390.43940.43940.5000
2510131541412610.45450.51520.51520.51520.45450.51520.56060.56060.5000
2610131661311810.45450.51520.51520.51520.45450.51520.56060.56060.5000
2710116391014500.45450.39390.39390.39390.45450.39390.43940.43940.5000
2810115212561110.45450.51520.51520.51520.45450.51520.56060.56060.5000
291011159741200.45450.39390.39390.39390.45450.39390.43940.43940.5000
301011517913310.45450.39390.39390.39390.45450.39390.43940.43940.5000
3110121613531510.45450.39390.39390.39390.45450.39390.43940.43940.5000
3210121531412500.45450.51520.51520.51520.45450.51520.56060.56060.5000
3310116515671400.45450.51520.51520.51520.45450.51520.56060.56060.5000
3410115312671100.45450.51520.51520.51520.45450.51520.56060.56060.5000
3510116271115310.45450.39390.39390.39390.45450.39390.43940.43940.5000
3610151661513800.45450.51520.51520.51520.45450.51520.56060.56060.5000
3710115114231300.45450.51520.51520.51520.45450.51520.56060.56060.5000
381011327812300.45450.39390.39390.39390.45450.39390.43940.43940.5000
3910141671311910.45450.51520.51520.51520.45450.51520.56060.56060.5000
4010121611741400.45450.39390.39390.39390.45450.39390.43940.43940.5000
4110116211781010.45450.51520.51520.51520.45450.51520.56060.56060.5000
4210111511521400.45450.39390.39390.39390.45450.39390.43940.43940.5000
4310116381115410.45450.39390.39390.39390.45450.39390.43940.43940.5000
4410131610961300.45450.39390.39390.39390.45450.39390.43940.43940.5000
451012149741200.45450.39390.39390.39390.45450.39390.43940.43940.5000
4610121410631310.45450.39390.39390.39390.45450.39390.43940.43940.5000
4710116215341410.45450.51520.51520.51520.45450.51520.56060.56060.5000
4810131641513610.45450.51520.51520.51520.45450.51520.56060.56060.5000
4910151612961510.45450.39390.39390.39390.45450.39390.43940.43940.5000
5010131551311700.45450.51520.51520.51520.45450.51520.56060.56060.5000
51101165101115610.45450.39390.39390.39390.45450.39390.43940.43940.5000
5210116171014310.45450.39390.39390.39390.45450.39390.43940.43940.5000
5310121541311600.45450.51520.51520.51520.45450.51520.56060.56060.5000
5410111613421500.45450.39390.39390.39390.45450.39390.43940.43940.5000
5510116281014410.45450.39390.39390.39390.45450.39390.43940.43940.5000
5610121612631510.45450.39390.39390.39390.45450.39390.43940.43940.5000
5710115313561200.45450.51520.51520.51520.45450.51520.56060.56060.5000
5810121661210810.45450.51520.51520.51520.45450.51520.56060.56060.5000
5910116212671110.45450.51520.51520.51520.45450.51520.56060.56060.5000
6010116513891210.45450.51520.51520.51520.45450.51520.56060.56060.5000
6110115214341300.45450.51520.51520.51520.45450.51520.56060.56060.5000
6210116161115210.45450.39390.39390.39390.45450.39390.43940.43940.5000
631011427913300.45450.39390.39390.39390.45450.39390.43940.43940.5000
6410111531311510.45450.51520.51520.51520.45450.51520.56060.56060.5000
6510115213451200.45450.51520.51520.51520.45450.51520.56060.56060.5000
6610121511631410.45450.39390.39390.39390.45450.39390.43940.43940.5000
6710216514781300.06060.42420.42420.05140.06060.42420.03000.13640.5000
6810216151215200.78790.42420.42420.78780.78790.42421.00000.86360.5000
6910216315451410.06060.42420.42420.12580.06060.42420.13060.13640.5000
70102516111071400.78790.42420.42420.78780.78790.42421.00000.86360.5000
711021528913400.78790.42420.42420.78780.78790.42421.00000.86360.5000
7210251671412900.06060.42420.42420.03910.06060.42425.9968e-050.13640.5000
7310211512421410.78790.42420.42420.78770.78790.42420.99960.86360.5000
7410221510741300.78790.42420.42420.78780.78790.42421.00000.86360.5000
7510215381014410.78790.42420.42420.78780.78790.42421.00000.86360.5000
7610231510851310.78790.42420.42420.78780.78790.42421.00000.86360.5000
7710215161014210.78790.42420.42420.78780.78790.42421.00000.86360.5000
7810231511741410.78790.42420.42420.78780.78790.42421.00000.86360.5000
7910215311781000.06060.42420.42420.04170.06060.42420.00940.13640.5000
8010214213341200.06060.42420.42420.09340.06060.42420.09190.13640.5000
8110215271014310.78790.42420.42420.78780.78790.42421.00000.86360.5000
8210231612741510.78790.42420.42420.78780.78790.42421.00000.86360.5000
8310215211671000.06060.42420.42420.04350.06060.42420.01380.13640.5000
8410221651311700.06060.42420.42420.03910.06060.42428.9042e-050.13640.5000
8510221641412600.06060.42420.42420.03910.06060.42421.9630e-040.13640.5000
861021166119800.06060.42420.42420.03910.06060.42422.7199e-050.13640.5000
8710231651412710.06060.42420.42420.03910.06060.42421.3221e-040.13640.5000
8810216213561200.06060.42420.42420.05940.06060.42420.04390.13640.5000
8910215314451300.06060.42420.42420.09340.06060.42420.09190.13640.5000
9010231611851410.78790.42420.42420.78780.78790.42421.00000.86360.5000
9110231541412600.06060.42420.42420.03910.06060.42421.9630e-040.13640.5000
9210231661311800.06060.42420.42420.03910.06060.42425.9968e-050.13640.5000
9310216391014510.78790.42420.42420.78780.78790.42421.00000.86360.5000
9410215212561100.06060.42420.42420.05140.06060.42420.03000.13640.5000
951021159741210.78790.42420.42420.78780.78790.42421.00000.86360.5000
961021517913310.78790.42420.42420.78780.78790.42421.00000.86360.5000
9710221613531510.78790.42420.42420.78770.78790.42420.99960.86360.5000
9810221531412500.06060.42420.42420.03910.06060.42422.9146e-040.13640.5000
9910216515671400.06060.42420.42420.07250.06060.42420.06380.13640.5000
10010215312671100.06060.42420.42420.04650.06060.42420.02040.13640.5000
Now we can compute BIC for these models
% BIC per model: deviance plus log(n) times the number of free parameters.
n = 65; % assumed trials per subject — TODO confirm every subject has 65 trials
freeParamCounts = [4, 4, 4, 5, 5, 5, 4, 5, 1];
for m = 1:numel(freeParamCounts)
altSubjectData.(sprintf('BIC_M%d', m)) = ...
altSubjectData.(sprintf('Deviance_M%d', m)) + log(n) * freeParamCounts(m);
end
And now we can compare the BIC of all models
% Total BIC per model: the full model first, then alternatives M1-M9.
modelBIC = zeros(1, 10);
modelBIC(1) = sum(subjectData.BIC);
for m = 1:9
    modelBIC(m + 1) = sum(altSubjectData.(sprintf('BIC_M%d', m)));
end

% Winning model = lowest total BIC (index 1 is the full model, k+1 is Mk).
[minBIC, idx] = min(modelBIC);
disp(idx);
5
So model 4, which has everything except Rho

2.4 Validate the Best Model

First, let’s assess model performance at a basic level: we can look at prediction accuracy to begin
% Proportion of trials where the rounded model probability matches the choice.
mean(altTrialData.Chose1 == round(altTrialData.ad_Prob1))
ans = 0.7916
Next, we can proceed to look at the distribution of model accuracy across participants:
% Kernel-density estimate of per-subject BIC for model 4.
[dens, pts] = ksdensity(altSubjectData.BIC_M4);
plot(pts, dens);
xlabel('BIC M4');
ylabel('Density');
title('Density of BIC M4');
Let’s look at the bottom 25% of model performance:
% Isolate the worst-explained quartile: subjects with BIC_M4 above Q3.
Q3 = prctile(altSubjectData.BIC_M4, 75);
worstExplained = find(altSubjectData.BIC_M4 > Q3);

filteredData = altTrialData(ismember(altTrialData.SubjectID, altSubjectData.SubjectID(worstExplained)), :);

% Residuals (observed - predicted) vs. how much choice 1 costs player A.
x_diff = filteredData.a0 - filteredData.a1;
y_diff = filteredData.Chose1 - filteredData.ad_Prob1;

figure;
hold on;
uniqueSubjects = unique(filteredData.SubjectID);
colors = lines(length(uniqueSubjects));
for i = 1:length(uniqueSubjects)
    idx = filteredData.SubjectID == uniqueSubjects(i);
    % FIX: scatter has no 'Color' name-value pair, and colors(i) linearly
    % indexes a single scalar out of the Nx3 colormap; pass the RGB row.
    scatter(x_diff(idx), y_diff(idx), [], colors(i, :));
end
% FIX: fit least-squares lines once after plotting, instead of refitting
% every already-plotted series on each loop iteration.
lsline;
hold off;
xlabel('a0 - a1');
ylabel('Chose1 - ad\_Prob1');
Nothing systematic here
% Same residual plot against the cost of choice 2 to player A.
x_diff = filteredData.a0 - filteredData.a2;

figure;
hold on;
uniqueSubjects = unique(filteredData.SubjectID);
colors = lines(length(uniqueSubjects));
for i = 1:length(uniqueSubjects)
    idx = filteredData.SubjectID == uniqueSubjects(i);
    % FIX: pass the full RGB row positionally; 'Color' is not a scatter
    % property and colors(i) picks a single scalar from the colormap.
    scatter(x_diff(idx), y_diff(idx), [], colors(i, :));
end
lsline;  % fit once after all groups are plotted
hold off;
xlabel('a0 - a2');
ylabel('Chose1 - ad\_Prob1');
Systematically overpredicting choosing 1 when player A has more and underpredicting when player A has less. This suggests a bias that only seems to apply when looking at choice 2, because we don't see this on choice 1.
% Residuals against how much choice 1 costs player B.
x_diff = filteredData.b0 - filteredData.b1;

figure;
hold on;
uniqueSubjects = unique(filteredData.SubjectID);
colors = lines(length(uniqueSubjects));
for i = 1:length(uniqueSubjects)
    idx = filteredData.SubjectID == uniqueSubjects(i);
    % FIX: RGB row passed positionally ('Color' is invalid for scatter,
    % and colors(i) would linear-index a scalar).
    scatter(x_diff(idx), y_diff(idx), [], colors(i, :));
end
lsline;  % fit once after all groups are plotted
hold off;
xlabel('b0 - b1');
ylabel('Chose1 - ad\_Prob1');
This is fine
% Residuals against how much choice 2 costs player B.
x_diff = filteredData.b0 - filteredData.b2;

figure;
hold on;
uniqueSubjects = unique(filteredData.SubjectID);
colors = lines(length(uniqueSubjects));
for i = 1:length(uniqueSubjects)
    idx = filteredData.SubjectID == uniqueSubjects(i);
    % FIX: RGB row passed positionally ('Color' is invalid for scatter,
    % and colors(i) would linear-index a scalar).
    scatter(x_diff(idx), y_diff(idx), [], colors(i, :));
end
lsline;  % fit once after all groups are plotted
hold off;
xlabel('b0 - b2');
ylabel('Chose1 - ad\_Prob1');
Same issue as before - so it seems that these people have a preference/bias for player A when player B has more, but this only applies to Choice 1. Not very useful information, but we're dealing with a trialset that is probably too small for the number of participants we have. Anyways, we can't do anything with this information, we can just bear it in mind as we proceed to check assumptions: first linearity (we’ll do this across both choices)
% Linearity check: observed choice vs. predicted probability, split by
% whether player A starts at a disadvantage.
figure;
hold on;

groups = altTrialData.a0 < altTrialData.b0;
uniqueGroups = unique(groups);
% FIX: unique(groups) returns [false; true], so the FIRST series plotted is
% the a0 >= b0 group; the legend labels were swapped (and dropped equality).
titles = ["a0 \geq b0", "a0 < b0"];
colors = ["r*", "b*"];
for i = 1:length(uniqueGroups)
    idx = groups == uniqueGroups(i);
    plot(altTrialData.ad_Prob1(idx), altTrialData.Chose1(idx), colors(i));
end
lsline;
xlabel('ad\_Prob1');
ylabel('Chose1');
legend(titles)
hold off
This isn't great, but it's fine. Second, normality of error:
% Normality of error: overlay the residual density with a zero-mean
% Gaussian of matching spread, smoothed with the same bandwidth.
resid = altTrialData.ad_Prob1 - altTrialData.Chose1;
sigma = std(resid);
refSample = normrnd(0, sigma, 1000, 1);
[densActual, gridActual] = ksdensity(resid, 'Bandwidth', sigma);
[densRef, gridRef] = ksdensity(refSample, 'Bandwidth', sigma);
figure;
hold on;
plot(gridActual, densActual, 'LineWidth', 2, 'Color', 'b');
plot(gridRef, densRef, 'LineWidth', 2, 'Color', 'r');
xlabel('Residuals');
ylabel('Density');
legend({'Actual', 'Predicted'});
hold off;
Looks very good. Third we can examine homoscedasticity:
% Homoscedasticity: residual spread should be constant across a1 - a2.
x_vals = altTrialData.a1 - altTrialData.a2;
y_vals = altTrialData.ad_Prob1 - altTrialData.Chose1;
scatter(x_vals, y_vals);
hold on;

lsline;

mdl = fitlm(x_vals, y_vals);

% Sort x so the confidence band can be drawn as a single polygon.
% (Removed unused locals sortIdx and y_pred.)
x_sorted = sort(x_vals);
[~, y_ci] = predict(mdl, x_sorted);

% Shaded 95% confidence band around the fitted line.
fill([x_sorted; flipud(x_sorted)], [y_ci(:,1); flipud(y_ci(:,2))], 'k', ...
    'FaceAlpha', 0.2, 'EdgeColor', 'none');

xlabel('a1 - a2');
ylabel('ad\_Prob1 - Chose1');
hold off;
Nice constant variance cloud across all X values. And finally independence of error:
% Independence of error: residuals vs. a1 - a2, split by initial advantage.
figure;
hold on;

groups = altTrialData.a0 < altTrialData.b0;
uniqueGroups = unique(groups);
% FIX: the first series plotted is groups == false (a0 >= b0); the legend
% labels were swapped relative to plotting order and omitted equality.
titles = ["a0 \geq b0", "a0 < b0"];
colors = ["r*", "b*"];
for i = 1:length(uniqueGroups)
    idx = groups == uniqueGroups(i);
    x = altTrialData.a1(idx) - altTrialData.a2(idx);
    % Renamed from 'diff' to avoid shadowing MATLAB's built-in diff().
    resid = altTrialData.Chose1(idx) - altTrialData.ad_Prob1(idx);
    plot(x, resid, colors(i));
end
lsline;
xlabel('a1 - a2');
ylabel('Chose1 - ad\_Prob1');
legend(titles)
hold off
The model seems to have a slight bias, under-predicting the likelihood of choosing option 1 when Player A has less than Player B and Choice 2 is better for player A. But this seems to not be such a big issue.
Let’s assess the independence: i.e. the extent to which our model captures all differences in choice behavior between different people:
% Condition indicator: true when player A starts with the larger endowment
% (no semicolon, so the augmented table is displayed below).
altTrialData.a0greaterThanb0 = altTrialData.a0 > altTrialData.b0
altTrialData = 3704×18 table
 SubjectIDa0b0a1b1a2b2Chose1alphaOnly_Prob1deltaOnly_Prob1rhoOnly_Prob1ad_Prob1ar_Prob1dr_Prob1noEpsilon_Prob1noGamma_Prob1gammaOnly_Prob1a0greaterThanb0
110116514781310.45450.51520.51520.51520.45450.51520.56060.56060.50001
210116151215200.45450.39390.39390.39390.45450.39390.43940.43940.50001
310116315451400.45450.51520.51520.51520.45450.51520.56060.56060.50001
4101516111071400.45450.39390.39390.39390.45450.39390.43940.43940.50000
51011528913400.45450.39390.39390.39390.45450.39390.43940.43940.50001
610151671412910.45450.51520.51520.51520.45450.51520.56060.56060.50000
710111512421400.45450.39390.39390.39390.45450.39390.43940.43940.50000
810121510741300.45450.39390.39390.39390.45450.39390.43940.43940.50000
910115381014400.45450.39390.39390.39390.45450.39390.43940.43940.50001
1010131510851300.45450.39390.39390.39390.45450.39390.43940.43940.50000
1110115161014200.45450.39390.39390.39390.45450.39390.43940.43940.50001
1210131511741410.45450.39390.39390.39390.45450.39390.43940.43940.50000
1310115311781000.45450.51520.51520.51520.45450.51520.56060.56060.50001
1410114213341200.45450.51520.51520.51520.45450.51520.56060.56060.50001
1510115271014300.45450.39390.39390.39390.45450.39390.43940.43940.50001
1610131612741500.45450.39390.39390.39390.45450.39390.43940.43940.50000
1710115211671000.45450.51520.51520.51520.45450.51520.56060.56060.50001
1810121651311700.45450.51520.51520.51520.45450.51520.56060.56060.50000
1910121641412610.45450.51520.51520.51520.45450.51520.56060.56060.50000
201011166119810.45450.51520.51520.51520.45450.51520.56060.56060.50000
2110131651412710.45450.51520.51520.51520.45450.51520.56060.56060.50000
2210116213561200.45450.51520.51520.51520.45450.51520.56060.56060.50001
2310115314451310.45450.51520.51520.51520.45450.51520.56060.56060.50001
2410131611851400.45450.39390.39390.39390.45450.39390.43940.43940.50000
2510131541412610.45450.51520.51520.51520.45450.51520.56060.56060.50000
2610131661311810.45450.51520.51520.51520.45450.51520.56060.56060.50000
2710116391014500.45450.39390.39390.39390.45450.39390.43940.43940.50001
2810115212561110.45450.51520.51520.51520.45450.51520.56060.56060.50001
291011159741200.45450.39390.39390.39390.45450.39390.43940.43940.50000
301011517913310.45450.39390.39390.39390.45450.39390.43940.43940.50001
3110121613531510.45450.39390.39390.39390.45450.39390.43940.43940.50000
3210121531412500.45450.51520.51520.51520.45450.51520.56060.56060.50000
3310116515671400.45450.51520.51520.51520.45450.51520.56060.56060.50001
3410115312671100.45450.51520.51520.51520.45450.51520.56060.56060.50001
3510116271115310.45450.39390.39390.39390.45450.39390.43940.43940.50001
3610151661513800.45450.51520.51520.51520.45450.51520.56060.56060.50000
3710115114231300.45450.51520.51520.51520.45450.51520.56060.56060.50001
381011327812300.45450.39390.39390.39390.45450.39390.43940.43940.50001
3910141671311910.45450.51520.51520.51520.45450.51520.56060.56060.50000
4010121611741400.45450.39390.39390.39390.45450.39390.43940.43940.50000
4110116211781010.45450.51520.51520.51520.45450.51520.56060.56060.50001
4210111511521400.45450.39390.39390.39390.45450.39390.43940.43940.50000
4310116381115410.45450.39390.39390.39390.45450.39390.43940.43940.50001
4410131610961300.45450.39390.39390.39390.45450.39390.43940.43940.50000
451012149741200.45450.39390.39390.39390.45450.39390.43940.43940.50000
4610121410631310.45450.39390.39390.39390.45450.39390.43940.43940.50000
4710116215341410.45450.51520.51520.51520.45450.51520.56060.56060.50001
4810131641513610.45450.51520.51520.51520.45450.51520.56060.56060.50000
4910151612961510.45450.39390.39390.39390.45450.39390.43940.43940.50000
5010131551311700.45450.51520.51520.51520.45450.51520.56060.56060.50000
51101165101115610.45450.39390.39390.39390.45450.39390.43940.43940.50001
5210116171014310.45450.39390.39390.39390.45450.39390.43940.43940.50001
5310121541311600.45450.51520.51520.51520.45450.51520.56060.56060.50000
5410111613421500.45450.39390.39390.39390.45450.39390.43940.43940.50000
5510116281014410.45450.39390.39390.39390.45450.39390.43940.43940.50001
5610121612631510.45450.39390.39390.39390.45450.39390.43940.43940.50000
5710115313561200.45450.51520.51520.51520.45450.51520.56060.56060.50001
5810121661210810.45450.51520.51520.51520.45450.51520.56060.56060.50000
5910116212671110.45450.51520.51520.51520.45450.51520.56060.56060.50001
6010116513891210.45450.51520.51520.51520.45450.51520.56060.56060.50001
6110115214341300.45450.51520.51520.51520.45450.51520.56060.56060.50001
6210116161115210.45450.39390.39390.39390.45450.39390.43940.43940.50001
631011427913300.45450.39390.39390.39390.45450.39390.43940.43940.50001
6410111531311510.45450.51520.51520.51520.45450.51520.56060.56060.50000
6510115213451200.45450.51520.51520.51520.45450.51520.56060.56060.50001
6610121511631410.45450.39390.39390.39390.45450.39390.43940.43940.50000
6710216514781300.06060.42420.42420.05140.06060.42420.03000.13640.50001
6810216151215200.78790.42420.42420.78780.78790.42421.00000.86360.50001
6910216315451410.06060.42420.42420.12580.06060.42420.13060.13640.50001
70102516111071400.78790.42420.42420.78780.78790.42421.00000.86360.50000
711021528913400.78790.42420.42420.78780.78790.42421.00000.86360.50001
7210251671412900.06060.42420.42420.03910.06060.42425.9968e-050.13640.50000
7310211512421410.78790.42420.42420.78770.78790.42420.99960.86360.50000
7410221510741300.78790.42420.42420.78780.78790.42421.00000.86360.50000
7510215381014410.78790.42420.42420.78780.78790.42421.00000.86360.50001
7610231510851310.78790.42420.42420.78780.78790.42421.00000.86360.50000
7710215161014210.78790.42420.42420.78780.78790.42421.00000.86360.50001
7810231511741410.78790.42420.42420.78780.78790.42421.00000.86360.50000
7910215311781000.06060.42420.42420.04170.06060.42420.00940.13640.50001
8010214213341200.06060.42420.42420.09340.06060.42420.09190.13640.50001
8110215271014310.78790.42420.42420.78780.78790.42421.00000.86360.50001
8210231612741510.78790.42420.42420.78780.78790.42421.00000.86360.50000
8310215211671000.06060.42420.42420.04350.06060.42420.01380.13640.50001
8410221651311700.06060.42420.42420.03910.06060.42428.9042e-050.13640.50000
8510221641412600.06060.42420.42420.03910.06060.42421.9630e-040.13640.50000
861021166119800.06060.42420.42420.03910.06060.42422.7199e-050.13640.50000
8710231651412710.06060.42420.42420.03910.06060.42421.3221e-040.13640.50000
8810216213561200.06060.42420.42420.05940.06060.42420.04390.13640.50001
8910215314451300.06060.42420.42420.09340.06060.42420.09190.13640.50001
9010231611851410.78790.42420.42420.78780.78790.42421.00000.86360.50000
9110231541412600.06060.42420.42420.03910.06060.42421.9630e-040.13640.50000
9210231661311800.06060.42420.42420.03910.06060.42425.9968e-050.13640.50000
9310216391014510.78790.42420.42420.78780.78790.42421.00000.86360.50001
9410215212561100.06060.42420.42420.05140.06060.42420.03000.13640.50001
951021159741210.78790.42420.42420.78780.78790.42421.00000.86360.50000
961021517913310.78790.42420.42420.78780.78790.42421.00000.86360.50001
9710221613531510.78790.42420.42420.78770.78790.42420.99960.86360.50000
9810221531412500.06060.42420.42420.03910.06060.42422.9146e-040.13640.50000
9910216515671400.06060.42420.42420.07250.06060.42420.06380.13640.50001
10010215312671100.06060.42420.42420.04650.06060.42420.02040.13640.50001
% Logistic mixed model: condition (a0 > b0) as a fixed effect plus a
% per-subject random intercept.
ric_model = fitglme(altTrialData, 'Chose1 ~ ad_Prob1 + a0greaterThanb0 + (1 | SubjectID)', 'Distribution', 'binomial');
ric_model.Rsquared
ans = struct with fields:
Ordinary: 0.6858 Adjusted: 0.6856
So there are no convergence warnings here (i.e. that we reached the maximum number of iterations - or attempts to find a solution - without satisfying the tolerance - or the criteria necessary to stop looking). Let's look at the R squared of the standard model for comparison.
% Baseline fixed-effects-only logistic regression for comparison.
model = fitglm(altTrialData, 'Chose1 ~ ad_Prob1', 'Distribution', 'binomial');
model.Rsquared
ans = struct with fields:
Ordinary: 0.4435 Adjusted: 0.4434 LLR: 0.3890 Deviance: 0.3890 AdjGeneralized: 0.5542
So there is a lot of unexplained variance in the standard model. Let's compare to the model with only the random intercept now:
% Same as the condition model but with only the per-subject random intercept,
% to isolate how much the condition term adds.
ri_model = fitglme(altTrialData, 'Chose1 ~ ad_Prob1 + (1 | SubjectID)', 'Distribution', 'binomial');
ri_model.Rsquared
ans = struct with fields:
Ordinary: 0.6635 Adjusted: 0.6635
Wow, so the condition really only explains a very small amount of variance - there's a lot of individual variance that the model doesn't pick up. Let's compare this to the 3 norm model:
% Comparison against the three-norm model's predictions (fit on trialData).
threeNormmodel = fitglm(trialData, 'Chose1 ~ Prob1', 'Distribution', 'binomial');
threeNormmodel.Rsquared
ans = struct with fields:
Ordinary: 0.4399 Adjusted: 0.4398 LLR: 0.3868 Deviance: 0.3868 AdjGeneralized: 0.5518
So somehow there's a lot of individual variance that our models just don't detect whatsoever. Let's see if we can trigger a convergence warning by also including a random slope for condition.
% Adds a per-subject random slope for the condition on top of the random
% intercept, to see whether the richer model triggers convergence warnings.
ris_model = fitglme(altTrialData, 'Chose1 ~ ad_Prob1 + a0greaterThanb0 + (1 + a0greaterThanb0 | SubjectID)', 'Distribution', 'binomial');
ris_model.Rsquared
ans = struct with fields:
Ordinary: 0.6891 Adjusted: 0.6890
No convergence warnings even here. Okay, let's take it onboard that our model is somehow missing 20% of the apparent variance in this behavior. We can proceed to fivefold validation
% Five-fold cross-validation of the winning model: for each subject, refit
% on 4/5 of trials and predict the held-out fold out-of-sample.
fivefold = table();
trialData.Prob1_ff = zeros(height(trialData), 1);
for i = 1:length(included_subjects)
df = grab_data(included_subjects(i));
df.Prob1 = zeros(height(df), 1);
% Shuffle trial indices so each fold is a random fifth of the trials.
order = randperm(height(df));
% Per-fold parameter estimates, one slot per fold.
A_ff = zeros(1, 5);
D_ff = zeros(1, 5);
B_ff = zeros(1, 5);
E_ff = zeros(1, 5);
G_ff = zeros(1, 5);
% NOTE(review): df.Pred is initialized but never written below — confirm
% whether it is still needed.
df.Pred = zeros(height(df), 1);
 
for z = 1:5
% Fold boundaries; rounding keeps folds near-equal when height(df)/5
% is not an integer.
j = round((z - 1) * (height(df) / 5) + 1);
% NOTE(review): this overwrites the workspace variable n (= 65 above).
n = round(z * (height(df) / 5));
withheld = order(j:n);
 
% Fit on the training folds only, using parameter slots [1,2,4:6]
% (slot 3, rho, is excluded from this model).
result_ff = optimize(@of_ad, initial_params([1, 2, 4:6]), lower_bounds([1, 2, 4:6]), upper_bounds([1, 2, 4:6]), df(~ismember(1:height(df), withheld), :));
A_ff(z) = result_ff(1);
D_ff(z) = result_ff(2);
B_ff(z) = result_ff(3);
E_ff(z) = result_ff(4);
G_ff(z) = result_ff(5);
 
% Re-insert rho = 0 at slot 3, then predict the withheld trials.
pars = [result_ff(1:2), 0, result_ff(3:5)];
df.Prob1(withheld) = generatePredictions(pars, df(withheld, :));
end
 
% Binomial deviance of the out-of-sample predictions for this subject.
Deviance_ff = -2 * sum(df.Chose1 .* log(df.Prob1) + (1 - df.Chose1) .* log(1 - df.Prob1));
fivefold(i, 1:27) = {included_subjects(i), Deviance_ff, ...
A_ff(1), A_ff(2), A_ff(3), A_ff(4), A_ff(5), ...
D_ff(1), D_ff(2), D_ff(3), D_ff(4), D_ff(5), ...
B_ff(1), B_ff(2), B_ff(3), B_ff(4), B_ff(5), ...
E_ff(1), E_ff(2), E_ff(3), E_ff(4), E_ff(5), ...
G_ff(1), G_ff(2), G_ff(3), G_ff(4), G_ff(5)};
trialData.Prob1_ff(trialData.SubjectID == included_subjects(i)) = df.Prob1;
end
 
% Columns: subject, out-of-sample deviance, then 5 folds x 5 parameters.
fivefold.Properties.VariableNames = {'SubjectID', 'Deviance', ...
'A_F1', 'A_F2', 'A_F3', 'A_F4', 'A_F5', ...
'D_F1', 'D_F2', 'D_F3', 'D_F4', 'D_F5', ...
'B_F1', 'B_F2', 'B_F3', 'B_F4', 'B_F5', ...
'E_F1', 'E_F2', 'E_F3', 'E_F4', 'E_F5', ...
'G_F1', 'G_F2', 'G_F3', 'G_F4', 'G_F5'}
fivefold = 57×27 table
 SubjectIDDevianceA_F1A_F2A_F3A_F4A_F5D_F1D_F2D_F3D_F4D_F5B_F1B_F2B_F3B_F4B_F5E_F1E_F2E_F3E_F4E_F5G_F1G_F2G_F3G_F4G_F5
110197.18800.47820.48110.41451.21030.24821.43071.39621.46190.75691.48696.30716.04205.90124.02854.25330.47640.45000.40380.50000.3964-0.0877-0.0556-0.1190-0.00940.0045
210267.70721.10950.84511.59940.87061.15230.16460.12020.10640.11880.20342.62404.04877.46463.95693.30830.13470.15630.12590.10100.1092-0.5000-0.3297-0.3631-0.2338-0.2933
310321.82851.51471.49241.48811.47351.48820.14520.13560.13680.13300.13556.62146.67896.63056.75606.69550.01864.8216e-050.02090.01860.0200-0.4981-0.0162-0.4984-0.4981-0.4984
410421.93391.59411.59471.47711.63301.59380.10110.10200.13370.07450.09827.49177.50306.67846.32937.55420.01790.01934.8248e-050.02000.01860.49800.4982-0.00890.49830.4981
510522.89190.46124.0252e-063.3869e-060.47916.5183e-061.63341.03241.04261.43770.11885.13740.63970.65386.52235.36810.04000.02190.02980.02281.3567e-05-0.4991-0.5000-0.5000-0.4985-0.3360
610666.94481.61521.55391.61401.60161.59760.10030.09080.10930.09680.09877.63327.53447.50967.43987.60460.18960.11540.15380.19230.1141-0.1836-3.5427e-08-0.3750-0.2000-0.1494
710775.08620.26880.37560.33440.25350.31251.60471.68571.50681.60191.57493.96194.29004.12674.10734.61180.22710.26070.20540.18930.2464-0.0283-0.1164-0.1957-0.0283-0.1243
810893.23990.30161.98680.82200.82591.1661e-040.90491.36040.67500.64621.01940.09939.93821.79471.78910.17500.13230.38780.35500.39170.4661-0.4999-0.1668-0.2839-0.1939-0.1502
910977.75701.74920.36730.27490.20871.2049e-071.70061.30901.66911.53900.05949.35966.14923.87124.48403.72880.44220.32920.34620.32640.2733-0.2234-0.3178-0.2778-0.3298-0.2838
1011026.95988.7831e-063.4212e-064.6848e-065.3349e-060.43141.04701.03471.04261.04281.54890.72700.67220.67440.72694.04120.06350.05380.07440.05160.0417-0.5000-0.5000-0.5000-0.5000-0.4992
111110.00691.50291.49051.48741.49281.49220.12770.13520.13890.13990.13566.13196.66386.68066.66706.68154.7490e-054.7845e-054.8289e-054.7938e-054.7864e-050.0954-0.0182-0.01390.0012-0.0180
1211289.18371.19140.40651.57401.60070.61541.36150.87951.58881.63420.74237.32830.40129.60379.19710.45220.27470.23220.25600.27670.0976-0.0141-0.0816-0.1663-0.1917-0.1630
131130.00681.49061.49051.49031.48901.48870.13950.13510.13890.13620.13396.66586.66426.65196.64266.63814.7844e-054.7751e-054.8447e-054.7911e-054.7794e-05-0.0067-2.9492e-04-0.0252-0.0123-0.0077
1411468.93811.05941.06121.07171.07671.04190.17750.10860.10390.14383.1824e-063.90651.54001.79831.95561.01450.15690.20090.16660.18650.1779-0.3593-0.2982-0.2821-0.2937-0.5000
1511540.61521.49781.48201.48181.47251.47890.13620.13940.13370.13310.13816.56656.48426.62316.55916.56630.13340.16670.11110.13050.1400-0.4997-0.4998-0.4997-0.4998-0.4998
1611631.76681.49661.49571.48991.48561.53330.13650.13140.13930.13700.14106.64366.37176.54566.56456.54030.08340.09260.08000.10000.0962-0.4996-0.4996-0.4996-0.4996-0.4997
1711782.43320.43780.24010.24701.25041.21971.58811.52381.56191.99991.98594.44404.26534.21849.99979.93550.27110.25300.19230.27920.2284-0.0902-0.0765-0.2000-0.01150.0472
1811861.24371.60631.56291.62491.57491.57300.09980.09840.10390.08960.10187.61177.46657.25267.34947.35610.20840.18670.20240.23810.1538-0.3345-0.3929-0.2941-0.3250-0.3750
1911922.15911.51631.49211.48491.48571.49230.13450.13570.15180.13700.13536.45286.70096.61866.87136.67780.02004.7720e-050.01730.01930.0209-0.49840.0011-0.4979-0.4983-0.4985
2012093.52680.51271.45160.86700.52151.28871.48101.99931.11721.46940.80204.00349.99654.10143.99696.07050.50000.47990.50000.50000.5000-0.0192-0.0849-0.0769-0.0962-0.1538
2112188.61960.98550.62431.50580.94081.06270.16091.36830.08001.11760.15220.39743.99356.77584.11891.72400.22010.50000.31550.50000.3120-0.4577-0.1346-0.1604-0.2115-0.3582
221220.00711.47531.49101.48671.48701.49000.13370.13580.13590.14890.13526.74886.69946.63926.66186.66794.7765e-054.7729e-054.8605e-054.7861e-054.7751e-05-0.0094-0.0139-0.02860.0036-0.0066
2312486.45892.00001.04532.00000.98101.59950.57610.16380.36350.19020.09459.99991.61759.99994.21377.35080.30560.25180.29820.30130.2629-0.04560.0600-0.02000.05170.0246
2412523.62351.20721.09571.11331.10131.59300.08420.09450.10560.10770.10373.58741.77172.56222.35587.50270.03944.8474e-060.02920.03280.01860.4991-0.34800.49880.49920.4981
2512673.86460.92761.57601.11121.56361.57431.12160.09990.85600.09340.09544.05697.40954.12427.22487.28280.50000.41670.50000.38770.3681-0.2255-0.3000-0.2885-0.2937-0.3491
2612791.77831.22101.61041.59361.09101.61370.68050.10250.09040.85440.10094.05007.77147.50323.99117.64680.50000.35750.36540.50000.4289-0.0472-0.0697-0.1842-0.1038-0.0628
2712865.55101.49081.50221.49001.49151.47580.13530.13790.14130.13530.13506.66236.70916.73146.68316.54700.05360.05560.06254.7776e-050.0536-0.4994-0.4994-0.4995-0.0197-0.4994
2812991.68511.28441.25991.28520.25351.26132.00002.00002.00001.42572.00009.99999.99999.99994.959810.00000.34590.33040.33620.29260.3889-0.0545-0.1048-0.0743-0.1835-0.0707
2913021.86171.50041.49011.49181.49831.51370.13970.13570.13620.14060.13776.70156.70386.66986.66436.66410.02004.8635e-050.01790.02090.0193-0.4983-0.0050-0.4980-0.4984-0.4982
3013142.62821.61211.59151.56301.59161.60810.10090.09930.08620.10150.10127.60517.50007.50727.39707.57030.09570.09140.09620.09140.0757-0.1269-0.2812-0.1000-0.2812-0.0283
3113259.74710.31821.5791e-050.30310.47480.25811.68841.03121.52231.54281.49094.01370.70174.82756.59694.43050.14500.08160.08000.14890.1339-0.3621-0.3464-0.4996-0.3708-0.3617
3213385.36850.38260.54850.52990.50901.46390.88600.79650.86030.82371.99950.39320.49380.96360.32479.99770.26020.26630.33550.18040.3591-0.2117-0.4033-0.2280-0.5000-0.2153
3313490.55351.57471.06601.03100.69501.04590.08950.18450.20231.28440.13737.34021.88551.53303.99971.37850.32520.37700.30640.50000.3901-0.0695-0.2106-0.1822-0.0769-0.1387
3413593.35051.40441.51991.51931.27321.86122.00001.99881.99772.00001.999610.00009.99419.98899.99989.99820.32040.33610.36520.33170.3571-0.0920-0.1878-0.1346-0.1985-0.2000
3513655.55981.60771.56681.46961.58551.59440.10120.09250.13540.10410.09987.45307.41816.54937.52467.48390.13390.10930.09620.11470.0762-0.3617-0.3170-0.4996-0.3385-0.2570
3613884.03170.24120.45760.23440.26160.23970.69221.42811.57191.46971.49394.42876.68814.03904.53174.19480.44030.33640.31190.34520.3523-0.2380-0.0946-0.3312-0.2241-0.1452
3714096.01680.48210.29431.00510.90871.71321.47491.55331.25051.14562.00006.29524.17706.26995.454510.00000.42560.40480.34220.30980.32010.03850.01470.0179-0.0233-0.0371
3814191.34651.59591.55551.51700.62242.00001.60821.55381.51641.17301.75109.03828.81079.03244.192410.00000.36680.46880.40050.50000.4027-0.1632-0.0511-0.1072-0.0686-0.0776
3914298.55301.58101.56591.53440.69301.99870.09330.09430.08931.28391.40047.53247.38127.23233.98679.99340.39670.41450.41370.50000.4116-0.0332-0.0361-0.1475-0.0283-0.2004
4014386.40860.47761.59691.53961.01481.62141.51360.09350.08870.95400.09063.98627.45917.51234.12217.49840.50000.26920.24670.50000.2197-0.1154-0.2857-0.1757-0.1346-0.1207
4114421.51131.48381.48581.49741.49281.49210.13720.13930.13440.13410.13596.63456.61166.60866.61586.64940.01930.01790.02090.02094.9496e-05-0.4982-0.4980-0.4984-0.4984-0.0319
4214589.79920.62791.54072.00001.99971.82721.20211.50860.60971.85111.76565.31028.58529.99989.99879.52030.50000.39180.33650.40720.4254-0.1400-0.1666-0.2654-0.2368-0.2103
4314693.63280.57420.34592.00002.00002.00001.41951.09641.54841.65181.64864.01282.94939.99999.99999.99990.50000.47930.40070.40780.4269-0.1415-0.2079-0.1872-0.2242-0.2028
441470.00730.45150.46630.56520.47830.63121.36641.35791.33561.34471.46654.19474.11235.81324.14455.33484.8934e-055.6209e-055.4811e-054.8731e-055.4675e-05-0.00550.0365-0.00479.4005e-040.0123
4514834.11811.49201.50591.48291.55871.47450.13820.13370.13430.14930.13016.70436.53766.53766.63356.55640.08340.06000.11540.09680.0962-0.4996-0.4995-0.4997-0.4996-0.4997
4614922.69402.00001.99971.99971.99951.48980.38120.36350.36350.36350.13889.99999.99869.99869.99736.66153.2270e-063.0099e-052.9809e-054.2908e-054.7838e-05-0.0168-0.3240-0.3212-0.27590.0060
4715080.66910.57980.52300.12230.31171.40460.99000.84800.99451.40752.00002.85083.91320.15224.05519.99990.29890.28810.07310.33590.3198-0.1814-0.2926-0.5000-0.1617-0.2010
4815163.02351.58171.56931.63901.58801.58850.08510.09470.09030.09320.09877.32717.55877.37677.52007.52510.21970.23190.23520.20820.2121-0.1207-0.2187-0.0951-0.1725-0.2857
4915267.99101.63191.02511.05261.09161.05770.09070.01390.07381.6190e-064.8798e-067.40820.83381.18821.08741.05270.18870.14510.14350.21150.1896-0.0094-0.1382-0.1618-0.1102-0.0828
50153101.90170.29990.34750.35070.37301.83611.56101.62431.44131.59281.99854.43824.00624.14184.65159.99240.37680.41310.44220.35210.4870-0.04060.12760.00250.15920.0436
5115442.29131.59751.50541.60391.59781.60220.09530.13510.10610.10210.10007.34756.61707.53077.50057.58030.03770.01860.03870.01790.03810.0094-0.4982-0.03850.49800.0472
5215549.11392.00002.00002.00001.59112.00000.37880.36270.36240.10230.361810.000010.000010.00007.48509.99980.03370.05730.06030.03770.04140.05040.13730.1820-0.00940.4994
5315669.60660.49970.49800.48440.47000.45420.96350.96250.94791.44340.92382.22762.22721.89836.55701.70220.04110.03820.03610.03710.0019-0.3919-0.3501-0.4648-0.49910.4999
54157116.69871.01252.00001.99101.99832.00001.9405e-060.75021.34761.37370.75080.286810.00009.96029.991610.00000.16000.28690.26470.23000.3318-0.5000-0.1365-0.3158-0.5000-0.1669
5515892.96590.64041.27780.25271.24541.29041.20542.00001.53010.78412.00004.09109.99984.11913.98349.99990.50000.44620.43750.50000.4643-0.0660-0.0789-0.0714-0.0660-0.0572
5616059.19460.49230.31680.49570.26250.34831.45531.55041.44991.38841.55046.70614.70916.36445.01014.81790.15560.17310.17110.11560.15260.14290.05560.1494-0.01920.1068
5716190.52720.48901.30001.41860.33440.25931.42120.42090.42461.60681.54536.41444.09704.07264.56084.22880.38460.50000.50000.37200.3958-0.2000-0.1923-0.2115-0.2200-0.1316
Now we can check the model accuracy
% Out-of-sample accuracy of the cross-validated predictions.
mean(trialData.Chose1 == round(trialData.Prob1_ff))
ans = 0.7681
That's not such a bad drop in model accuracy - only around 2%. Now let's test it against the normally recovered model:
% BIC for the cross-validated fit. NOTE(review): log(65) * 6 charges six
% parameters, but this fit estimates five (A, D, B, E, G) and BIC_M4 above
% uses log(n) * 5 — confirm the intended parameter count.
fivefold.BIC = fivefold.Deviance + log(65) * 6;
% Paired t-test: cross-validated BIC vs. the normally recovered M4 BIC.
[~, p, ~, stats] = ttest(fivefold.BIC, altSubjectData.BIC_M4); stats.p = round(p, 3)
stats = struct with fields:
tstat: 11.0559 df: 56 sd: 9.8826 p: 0
So the normally recovered model is significantly better, but that's not surprising and not concerning given the small decrease in accuracy. Let's look at the similarity of the parameters - first, let's compute cosine similarity.
% 25 similarity scores: 5 folds x 5 parameters, in slots 1-5 (A), 6-10 (D),
% 11-15 (B), 16-20 (E), 21-25 (G).
cosines = zeros(1, 25);
 
function d_cosine = cosine(A, B)
% COSINE Cosine similarity between vectors A and B.
%   Returns dot(A,B) / (||A|| * ||B||); in [-1, 1] for real nonzero
%   inputs, NaN if either vector has zero norm.
%   Note: the output variable was renamed from "cos", which shadowed the
%   built-in cos() inside this function body.
d_cosine = dot(A, B) / (norm(A) * norm(B));
end
 
% Cosine similarity between the normally recovered M4 parameters and each
% fold's recovered values. Slot layout in `cosines`: 1-5 alpha, 6-10 delta,
% 11-15 beta, 16-20 epsilon, 21-25 gamma; fivefold columns 3-7 hold alpha,
% 8-12 delta, 13-17 beta, 18-22 epsilon, 23-27 gamma.
paramNames = ["Alpha_M4", "Delta_M4", "Beta_M4", "Epsilon_M4", "Gamma_M4"];
for p = 1:numel(paramNames)
colBase = 2 + 5 * (p - 1);   % fold f for parameter p lives in column f + colBase
for fold = 1:5
cosines(5 * (p - 1) + fold) = cosine(altSubjectData.(paramNames(p)), fivefold{:, fold + colBase});
end
end
Now let's see the cosine for all parameters - first alpha:
% Mean cosine similarity for alpha across the 5 folds (slots 1-5).
mean(cosines(1:5))
ans = 0.9489
Very good. Now delta
% Mean cosine similarity for delta across the 5 folds (slots 6-10).
% Fixed off-by-one: the original averaged 6:11, which pulled in slot 11
% (the first beta cosine). The displayed value below predates this fix.
mean(cosines(6:10))
ans = 0.9336
Again, very good. Now beta
% Mean cosine similarity for beta across the 5 folds (slots 11-15).
mean(cosines(11:15))
ans = 0.9235
Also good. Now epsilon
% Mean cosine similarity for epsilon across the 5 folds (slots 16-20).
% Fixed off-by-one: the original averaged 16:21, which pulled in slot 21
% (the first gamma cosine). The displayed value below predates this fix.
mean(cosines(16:20))
ans = 0.9777
Nice. Finally gamma
% Mean cosine similarity for gamma across the 5 folds (slots 21-25).
mean(cosines(21:25))
ans = 0.8917
Everything looks good here. Let's go to the fun part - model testing.

3.1 Compare Models

First, let's test the alpha + delta model against the full model (which has alpha + delta + rho)
% Paired t-test: M4 (alpha + delta) BIC vs. full-model (alpha + delta + rho) BIC.
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, subjectData.BIC); stats.p = round(p, 3)
stats = struct with fields:
tstat: -13.4929 df: 56 sd: 2.4684 p: 0
So M4 has a significantly lower BIC than the full model (i.e. p < 0.001), meaning it is significantly better to ignore rank-reversal. How about the alpha only model (M1)
% Paired t-test: M4 BIC vs. M1 (alpha only, i.e. inequality aversion only).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M1); stats.p = round(p, 3)
stats = struct with fields:
tstat: -1.5722 df: 56 sd: 20.1206 p: 0.1220
So M4 is better, but not significantly better, meaning that including harm aversion does not significantly improve model performance. How about the delta only model (M2)
% Paired t-test: M4 BIC vs. M2 (delta only).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M2); stats.p = round(p, 3)
stats = struct with fields:
tstat: -5.2137 df: 56 sd: 33.6938 p: 0
So including inequality aversion significantly improves model performance. How about the rho only model (M3)
% Paired t-test: M4 BIC vs. M3 (rho only).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M3); stats.p = round(p, 3)
stats = struct with fields:
tstat: -5.2403 df: 56 sd: 33.6367 p: 0
Including both inequality aversion and harm aversion significantly improves model performance. Let's look at the inequality and rank reversal aversion model (M5)
% Paired t-test: M4 BIC vs. M5 (inequality + rank-reversal aversion).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M5); stats.p = round(p, 3)
stats = struct with fields:
tstat: -1.7419 df: 56 sd: 2.3230 p: 0.0870
Switching harm aversion for rank reversal aversion does not significantly lessen model performance. Interesting - now we can examine the harm and rank reversal aversion model (M6)
% Paired t-test: M4 BIC vs. M6 (harm + rank-reversal aversion).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M6); stats.p = round(p, 3)
stats = struct with fields:
tstat: -6.1536 df: 56 sd: 33.6839 p: 0
Switching inequality aversion for rank reversal aversion significantly lessens model performance. How about if we remove the noise and bias parameters from the full model
% Paired t-test: M4 BIC vs. M7 (full model with noise/bias parameters removed).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M7); stats.p = round(p, 3)
stats = struct with fields:
tstat: -3.5271 df: 56 sd: 100.1979 p: 1.0000e-03
So it does significantly hurt the model performance compared to our favored model - but let's compare apples to apples here:
% Apples-to-apples: full-model BIC vs. M7, isolating the effect of the
% noise/bias parameters.
[~, p, ~, stats] = ttest(subjectData.BIC, altSubjectData.BIC_M7); stats.p = round(p, 3)
stats = struct with fields:
tstat: -3.1910 df: 56 sd: 100.3137 p: 0.0020
So including these significantly improves model performance. We may wish to verify by instantiating this model as a variant of M4 instead of the full model, but at the moment it's not extremely urgent to do. Let's move onto M8 which only removes the bias parameter
% Paired t-test: full-model BIC vs. M8 (only the bias parameter removed).
[~, p, ~, stats] = ttest(subjectData.BIC, altSubjectData.BIC_M8); stats.p = round(p, 3)
stats = struct with fields:
tstat: 1.3248 df: 56 sd: 3.8013 p: 0.1910
It seems that including the bias parameter might not be extremely important, so we again may wish to rerun M8 as a variant of M4. But that is for another time. Let's finish up with M9, which only has left/right preferences
% Paired t-test: M4 BIC vs. M9 (left/right preference only).
[~, p, ~, stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_M9); stats.p = round(p, 3); stats
stats = struct with fields:
tstat: -4.9360 df: 56 sd: 31.8217 p: 0
So that's significant, but not as big as we might expect.

Here, we can conclude that inequality aversion plays a significant role in these decisions, but harm aversion does not. Also, these decisions tend to be very noisy, but not significantly determined by biases for choosing left versus right.

3.3 Test for Individual Differences

Let’s first recover parameters over the whole dataset and we can assess how accurate it is
% Fit one parameter set across ALL subjects at once (no individual
% differences); the third parameter is excluded from the search.
resultNID = optimize(@of_ad, initial_params([1,2,4:6]), lower_bounds([1,2,4:6]), upper_bounds([1,2,4:6]), trialData);
% Reinsert a 0 in the third slot (presumably rho — confirm against the
% full-model parameter ordering) to rebuild the full parameter vector.
pars = [resultNID(1:2), 0, resultNID(3:5)];
trialData.Prob1_NID = generatePredictions(pars, trialData);
% Accuracy of the group-level model against every subject's choices.
sum(trialData.Chose1 == round(trialData.Prob1_NID))/height(trialData)
ans = 0.5967
Not very good. This isn’t surprising given that people often have very different preferences. Now let’s test for individual differences.
altSubjectData.Deviance_NID = zeros(size(altSubjectData, 1), 1);
 
% Per-subject deviance under the group-level (no-individual-differences)
% fit: -2 * binomial log-likelihood of the observed choices.
for s = 1:length(included_subjects)
subjRows = included_subjects(s) == trialData.SubjectID;
subj = trialData(subjRows, :);
ll = subj.Chose1 .* log(subj.Prob1_NID) + (1 - subj.Chose1) .* log(1 - subj.Prob1_NID);
altSubjectData.Deviance_NID(s) = -2 * sum(ll);
end
 
% BIC with the 5-parameter penalty spread across subjects, since one
% parameter set serves the whole sample.
altSubjectData.BIC_NID = altSubjectData.Deviance_NID + log(65) * 5 / length(included_subjects);
% Paired t-test: per-subject M4 BIC vs. group-level (NID) BIC.
[~,p,~,stats] = ttest(altSubjectData.BIC_M4, altSubjectData.BIC_NID); stats.p = round(p, 3); stats
stats = struct with fields:
tstat: -3.6358 df: 56 sd: 30.0943 p: 1.0000e-03
Significant individual differences. Let’s see which models are worse
% Indices of candidate models whose total BIC exceeds the summed NID BIC.
% (modelBIC is presumably a vector of per-model summed BICs — confirm its
% construction earlier in the script.)
find(modelBIC > sum(altSubjectData.BIC_NID))
ans = 1×5
3 4 7 8 10
So M2 (delta only), M3 (rho only), M6 (delta + rho), M7 (no epsilon or gamma), and M9 (only left/right) are worse than this model